From 7d8c724346efe9c5fe6ad39b0997e9def76bef40 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Tue, 16 Sep 2025 16:21:06 +1000 Subject: [PATCH 1/6] Add generic data source template --- Makefile | 2 +- templates/data-sources.md.tmpl | 57 +++++++++++++++++++++++++++++ templates/resources.md.tmpl | 65 ++++++++++++++++++++++++++++++++++ 3 files changed, 123 insertions(+), 1 deletion(-) create mode 100644 templates/data-sources.md.tmpl create mode 100644 templates/resources.md.tmpl diff --git a/Makefile b/Makefile index bac041a4c..d94bcf925 100644 --- a/Makefile +++ b/Makefile @@ -225,7 +225,7 @@ docker-clean: ## Try to remove provisioned nodes and assigned network .PHONY: docs-generate docs-generate: tools ## Generate documentation for the provider - @ go tool github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate --provider-name elasticstack + @ go tool github.com/hashicorp/terraform-plugin-docs/cmd/tfplugindocs generate --provider-name terraform-provider-elasticstack .PHONY: gen diff --git a/templates/data-sources.md.tmpl b/templates/data-sources.md.tmpl new file mode 100644 index 000000000..d99da856b --- /dev/null +++ b/templates/data-sources.md.tmpl @@ -0,0 +1,57 @@ +{{/* Automatic subcategory */}} +{{- $nameParts := split .Name "_" -}} +{{- $stackComponent := index $nameParts 1 | title -}} +{{- $subcategory := $stackComponent -}} +{{ if and (eq $stackComponent "Elasticsearch") (gt (len $nameParts) 3) -}} + {{- $subcategory = index $nameParts 2 | title -}} +{{- end -}} + +{{/* Subcategory overrides */}} +{{ if eq .Name "elasticstack_elasticsearch_indices" -}} + {{- $subcategory = "Index" -}} +{{- end -}} +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "{{.Name}} {{.Type}} - {{.RenderedProviderName}}" +subcategory: "{{ $subcategory }}" +description: |- +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +--- + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{tffile .ExampleFile }} +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if or .HasImport .HasImportIDConfig .HasImportIdentityConfig }} + +## Import + +Import is supported using the following syntax: +{{- end }} +{{- if .HasImportIdentityConfig }} + +In Terraform v1.12.0 and later, the [`import` block](https://developer.hashicorp.com/terraform/language/import) can be used with the `identity` attribute, for example: + +{{tffile .ImportIdentityConfigFile }} + +{{ .IdentitySchemaMarkdown | trimspace }} +{{- end }} +{{- if .HasImportIDConfig }} + +In Terraform v1.5.0 and later, the [`import` block](https://developer.hashicorp.com/terraform/language/import) can be used with the `id` attribute, for example: + +{{tffile .ImportIDConfigFile }} +{{- end }} +{{- if .HasImport }} + +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + +{{codefile "shell" .ImportFile }} +{{- end }} diff --git a/templates/resources.md.tmpl b/templates/resources.md.tmpl new file mode 100644 index 000000000..ef099e3bb --- /dev/null +++ b/templates/resources.md.tmpl @@ -0,0 +1,65 @@ +{{/* Automatic subcategory */}} +{{- $nameParts := split .Name "_" -}} +{{- $stackComponent := index $nameParts 1 | title -}} +{{- $subcategory := $stackComponent -}} +{{ if and (eq $stackComponent "Elasticsearch") (gt (len $nameParts) 3) -}} + {{- $subcategory = index $nameParts 2 | title -}} +{{- end -}} + +{{ if eq $subcategory "Apm" -}} + {{- $subcategory = "APM" -}} +{{- end -}} + +{{/* 
Subcategory overrides */}} +{{ if or + (eq .Name "elasticstack_elasticsearch_component_template") + (eq .Name "elasticstack_elasticsearch_data_stream_lifecycle") + (eq .Name "elasticstack_elasticsearch_data_stream") + (eq .Name "elasticstack_elasticsearch_index") -}} + {{- $subcategory = "Index" -}} +{{- end -}} +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "{{.Name}} {{.Type}} - {{.RenderedProviderName}}" +subcategory: "{{ $subcategory }}" +description: |- +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +--- + +# {{.Name}} ({{.Type}}) + +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{tffile .ExampleFile }} +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if or .HasImport .HasImportIDConfig .HasImportIdentityConfig }} + +## Import + +Import is supported using the following syntax: +{{- end }} +{{- if .HasImportIdentityConfig }} + +In Terraform v1.12.0 and later, the [`import` block](https://developer.hashicorp.com/terraform/language/import) can be used with the `identity` attribute, for example: + +{{tffile .ImportIdentityConfigFile }} + +{{ .IdentitySchemaMarkdown | trimspace }} +{{- end }} +{{- if .HasImportIDConfig }} + +In Terraform v1.5.0 and later, the [`import` block](https://developer.hashicorp.com/terraform/language/import) can be used with the ` + "`" + `id` + "`" + ` attribute, for example: + +{{tffile .ImportIDConfigFile }} +{{- end }} +{{- if .HasImport }} + +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + +{{codefile "shell" .ImportFile }} +{{- end }} From 8170b699d449300ded898691e90c2d3369c42961 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Wed, 17 Sep 2025 10:03:29 +1000 Subject: [PATCH 2/6] Update code level descriptions from the existing templates --- internal/apm/agent_configuration/schema.go | 2 +- .../elasticsearch/index/indices/schema.go | 2 +- .../index/template_data_source.go | 2 +- .../ingest/processor_append_data_source.go | 2 +- .../ingest/processor_bytes_data_source.go | 6 +++++- .../ingest/processor_bytes_data_source.md | 5 +++++ .../ingest/processor_circle_data_source.go | 2 +- .../processor_community_id_data_source.go | 7 ++++++- .../processor_community_id_data_source.md | 4 ++++ .../ingest/processor_convert_data_source.go | 8 ++++++-- .../ingest/processor_convert_data_source.md | 17 +++++++++++++++++ .../ingest/processor_csv_data_source.go | 7 ++++++- .../ingest/processor_csv_data_source.md | 3 +++ .../ingest/processor_date_data_source.go | 7 +++++-- .../ingest/processor_date_data_source.md | 3 +++ .../processor_date_index_name_data_source.go | 6 +++++- .../processor_date_index_name_data_source.md | 5 +++++ .../ingest/processor_dissect_data_source.go | 6 +++++- .../ingest/processor_dissect_data_source.md | 5 +++++ .../processor_dissect_dot_expander_source.go | 2 +- .../ingest/processor_drop_data_source.go | 2 +- .../ingest/processor_enrich_data_source.go | 2 +- .../ingest/processor_fail_data_source.go | 2 +- .../processor_fingerprint_data_source.go | 2 +- .../ingest/processor_foreach_data_source.go | 6 +++++- .../ingest/processor_foreach_data_source.md | 13 +++++++++++++ .../ingest/processor_geoip_data_source.go | 6 +++++- .../ingest/processor_geoip_data_source.md | 7 +++++++ .../ingest/processor_grok_data_source.go | 6 +++++- .../ingest/processor_grok_data_source.md | 5 +++++ .../ingest/processor_gsub_data_source.go | 2 +- .../processor_html_strip_data_source.go | 2 +- 
.../ingest/processor_join_data_source.go | 2 +- .../ingest/processor_json_data_source.go | 2 +- .../ingest/processor_kv_data_source.go | 2 +- .../ingest/processor_lowercase_data_source.go | 2 +- ...processor_network_direction_data_source.go | 8 ++++++-- ...processor_network_direction_data_source.md | 19 +++++++++++++++++++ .../ingest/processor_pipeline_data_source.go | 2 +- ...processor_registered_domain_data_source.go | 2 +- .../ingest/processor_remove_data_source.go | 2 +- .../ingest/processor_rename_data_source.go | 2 +- .../ingest/processor_reroute_data_source.go | 2 +- .../ingest/processor_script_data_source.go | 2 +- .../ingest/processor_script_data_source.md | 11 +++++++++++ .../ingest/processor_set_data_source.go | 2 +- ...processor_set_security_user_data_source.go | 2 +- ...processor_set_security_user_data_source.md | 8 ++++++++ .../ingest/processor_sort_data_source.go | 2 +- .../ingest/processor_split_data_source.go | 2 +- .../ingest/processor_trim_data_source.go | 2 +- .../ingest/processor_uppercase_data_source.go | 2 +- .../ingest/processor_uri_parts_data_source.go | 2 +- .../ingest/processor_urldecode_data_source.go | 2 +- .../processor_user_agent_data_source.go | 2 +- .../system_user/resource-description.md | 3 +++ .../security/system_user/schema.go | 6 +++++- internal/elasticsearch/transform/transform.go | 6 +++++- internal/elasticsearch/transform/transform.md | 3 +++ internal/fleet/integration/schema.go | 7 ++++++- internal/fleet/integration_ds/schema.go | 11 ++++++++++- .../resource-description.md | 10 ++++++++++ internal/fleet/integration_policy/schema.go | 6 +++++- internal/kibana/alerting.go | 7 +++++-- internal/kibana/alerting.md | 7 +++++++ internal/kibana/data_view/schema.go | 2 +- .../kibana/import_saved_objects/schema.go | 2 +- internal/kibana/maintenance_window/schema.go | 2 +- internal/kibana/role.go | 7 ++++++- internal/kibana/role.md | 5 +++++ internal/kibana/slo.go | 2 +- internal/kibana/spaces/schema.go | 2 +- .../parameter/resource-description.md | 4 ++++ .../kibana/synthetics/parameter/schema.go | 6 +++++- .../private_location/resource-description.md | 4 ++++ .../synthetics/private_location/schema.go | 6 +++++- .../kibana/synthetics/resource-description.md | 12 ++++++++++++ internal/kibana/synthetics/schema.go | 6 +++++- 78 files changed, 305 insertions(+), 61 deletions(-) create mode 100644 internal/elasticsearch/ingest/processor_bytes_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_community_id_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_convert_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_csv_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_date_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_date_index_name_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_dissect_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_foreach_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_geoip_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_grok_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_network_direction_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_script_data_source.md create mode 100644 internal/elasticsearch/ingest/processor_set_security_user_data_source.md create mode 100644 internal/elasticsearch/security/system_user/resource-description.md create mode 100644 
internal/elasticsearch/transform/transform.md create mode 100644 internal/fleet/integration_policy/resource-description.md create mode 100644 internal/kibana/alerting.md create mode 100644 internal/kibana/role.md create mode 100644 internal/kibana/synthetics/parameter/resource-description.md create mode 100644 internal/kibana/synthetics/private_location/resource-description.md create mode 100644 internal/kibana/synthetics/resource-description.md diff --git a/internal/apm/agent_configuration/schema.go b/internal/apm/agent_configuration/schema.go index f3d73269f..79f6ee5d8 100644 --- a/internal/apm/agent_configuration/schema.go +++ b/internal/apm/agent_configuration/schema.go @@ -12,7 +12,7 @@ import ( func (r *resourceAgentConfiguration) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { resp.Schema = schema.Schema{ - Description: "Manages APM agent configuration.", + Description: "Creates or updates an APM agent configuration. See https://www.elastic.co/docs/solutions/observability/apm/apm-agent-central-configuration.", Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ diff --git a/internal/elasticsearch/index/indices/schema.go b/internal/elasticsearch/index/indices/schema.go index 17ba20a30..565529d25 100644 --- a/internal/elasticsearch/index/indices/schema.go +++ b/internal/elasticsearch/index/indices/schema.go @@ -21,7 +21,7 @@ func (d *dataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp func getSchema() schema.Schema { return schema.Schema{ - Description: "Manages Elasticsearch indices. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html", + Description: "Retrieves information about existing Elasticsearch indices. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html", Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ Description: "Generated ID for the indices.", diff --git a/internal/elasticsearch/index/template_data_source.go b/internal/elasticsearch/index/template_data_source.go index 14bc02c37..b512d49ef 100644 --- a/internal/elasticsearch/index/template_data_source.go +++ b/internal/elasticsearch/index/template_data_source.go @@ -161,7 +161,7 @@ func DataSourceTemplate() *schema.Resource { utils.AddConnectionSchema(templateSchema) return &schema.Resource{ - Description: "Retrieves index template definition. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template.html", + Description: "Retrieves information about an existing index template definition. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template.html", ReadContext: dataSourceIndexTemplateRead, Schema: templateSchema, } diff --git a/internal/elasticsearch/ingest/processor_append_data_source.go b/internal/elasticsearch/ingest/processor_append_data_source.go index eb94b29a8..18182f6d7 100644 --- a/internal/elasticsearch/ingest/processor_append_data_source.go +++ b/internal/elasticsearch/ingest/processor_append_data_source.go @@ -84,7 +84,7 @@ func DataSourceProcessorAppend() *schema.Resource { } return &schema.Resource{ - Description: "Appends one or more values to an existing array if the field already exists and it is an array. Converts a scalar to an array and appends one or more values to it if the field exists and it is a scalar. Creates an array containing the provided values if the field doesn’t exist. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/append-processor.html", + Description: "Helper data source which can be used to create the configuration for an append processor. This processor appends one or more values to an existing array if the field already exists and it is an array. Converts a scalar to an array and appends one or more values to it if the field exists and it is a scalar. Creates an array containing the provided values if the field doesn’t exist. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/append-processor.html", ReadContext: dataSourceProcessorAppendRead, diff --git a/internal/elasticsearch/ingest/processor_bytes_data_source.go b/internal/elasticsearch/ingest/processor_bytes_data_source.go index 7d9a65dbe..b3aa68adf 100644 --- a/internal/elasticsearch/ingest/processor_bytes_data_source.go +++ b/internal/elasticsearch/ingest/processor_bytes_data_source.go @@ -2,6 +2,7 @@ package ingest import ( "context" + _ "embed" "encoding/json" "strings" @@ -12,6 +13,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +//go:embed processor_bytes_data_source.md +var bytesDataSourceDescription string + func DataSourceProcessorBytes() *schema.Resource { processorSchema := map[string]*schema.Schema{ "id": { @@ -75,7 +79,7 @@ func DataSourceProcessorBytes() *schema.Resource { } return &schema.Resource{ - Description: "Converts a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). See: https://www.elastic.co/guide/en/elasticsearch/reference/current/bytes-processor.html", + Description: bytesDataSourceDescription, ReadContext: dataSourceProcessorBytesRead, diff --git a/internal/elasticsearch/ingest/processor_bytes_data_source.md b/internal/elasticsearch/ingest/processor_bytes_data_source.md new file mode 100644 index 000000000..7755b60ae --- /dev/null +++ b/internal/elasticsearch/ingest/processor_bytes_data_source.md @@ -0,0 +1,5 @@ +Helper data source which can be used to create the configuration for a bytes processor. The processor converts a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). See: https://www.elastic.co/guide/en/elasticsearch/reference/current/bytes-processor.html + +If the field is an array of strings, all members of the array will be converted. + +Supported human readable units are "b", "kb", "mb", "gb", "tb", "pb" case insensitive. An error will occur if the field is not a supported format or resultant value exceeds 2^63. \ No newline at end of file diff --git a/internal/elasticsearch/ingest/processor_circle_data_source.go b/internal/elasticsearch/ingest/processor_circle_data_source.go index 24a12d5fd..d1f8ed531 100644 --- a/internal/elasticsearch/ingest/processor_circle_data_source.go +++ b/internal/elasticsearch/ingest/processor_circle_data_source.go @@ -86,7 +86,7 @@ func DataSourceProcessorCircle() *schema.Resource { } return &schema.Resource{ - Description: "Converts circle definitions of shapes to regular polygons which approximate them. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-circle-processor.html", + Description: "Helper data source which can be used to create the configuration for an circle processor. This processor converts circle definitions of shapes to regular polygons which approximate them. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-circle-processor.html", ReadContext: dataSourceProcessorCircleRead, diff --git a/internal/elasticsearch/ingest/processor_community_id_data_source.go b/internal/elasticsearch/ingest/processor_community_id_data_source.go index a17b38d11..999077116 100644 --- a/internal/elasticsearch/ingest/processor_community_id_data_source.go +++ b/internal/elasticsearch/ingest/processor_community_id_data_source.go @@ -5,6 +5,8 @@ import ( "encoding/json" "strings" + _ "embed" + "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -12,6 +14,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +//go:embed processor_community_id_data_source.md +var communityIdDataSourceDescription string + func DataSourceProcessorCommunityId() *schema.Resource { processorSchema := map[string]*schema.Schema{ "id": { @@ -117,7 +122,7 @@ func DataSourceProcessorCommunityId() *schema.Resource { } return &schema.Resource{ - Description: "Computes the Community ID for network flow data as defined in the Community ID Specification. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/community-id-processor.html", + Description: communityIdDataSourceDescription, ReadContext: dataSourceProcessorCommunityIdRead, diff --git a/internal/elasticsearch/ingest/processor_community_id_data_source.md b/internal/elasticsearch/ingest/processor_community_id_data_source.md new file mode 100644 index 000000000..3a998751a --- /dev/null +++ b/internal/elasticsearch/ingest/processor_community_id_data_source.md @@ -0,0 +1,4 @@ +Helper data source which can be used to create the configuration for a community ID processor. This processor computes the Community ID for network flow data as defined in the Community ID Specification. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/community-id-processor.html +You can use a community ID to correlate network events related to a single flow. + +The community ID processor reads network flow data from related [Elastic Common Schema (ECS)](https://www.elastic.co/guide/en/ecs/1.12) fields by default. If you use the ECS, no configuration is required. diff --git a/internal/elasticsearch/ingest/processor_convert_data_source.go b/internal/elasticsearch/ingest/processor_convert_data_source.go index 323f297e3..8c1b40408 100644 --- a/internal/elasticsearch/ingest/processor_convert_data_source.go +++ b/internal/elasticsearch/ingest/processor_convert_data_source.go @@ -5,6 +5,8 @@ import ( "encoding/json" "strings" + _ "embed" + "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -12,6 +14,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +//go:embed processor_convert_data_source.md +var convertDataSourceDescription string + func DataSourceProcessorConvert() *schema.Resource { processorSchema := map[string]*schema.Schema{ "id": { @@ -81,8 +86,7 @@ func DataSourceProcessorConvert() *schema.Resource { } return &schema.Resource{ - Description: "Converts a field in the currently ingested document to a different type, such as converting a string to an integer. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/convert-processor.html", - + Description: convertDataSourceDescription, ReadContext: dataSourceProcessorConvertRead, Schema: processorSchema, diff --git a/internal/elasticsearch/ingest/processor_convert_data_source.md b/internal/elasticsearch/ingest/processor_convert_data_source.md new file mode 100644 index 000000000..2fbc05988 --- /dev/null +++ b/internal/elasticsearch/ingest/processor_convert_data_source.md @@ -0,0 +1,17 @@ +Helper data source which can be used to create the configuration for a convert processor. This processor converts a field in the currently ingested document to a different type, such as converting a string to an integer. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/convert-processor.html + +The supported types include: +- `integer` +- `long` +- `float` +- `double` +- `string` +- `boolean` +- `ip` +- `auto` + +Specifying `boolean` will set the field to true if its string value is equal to true (ignoring case), to false if its string value is equal to false (ignoring case), or it will throw an exception otherwise. + +Specifying `ip` will set the target field to the value of `field` if it contains a valid IPv4 or IPv6 address that can be indexed into an IP field type. + +Specifying `auto` will attempt to convert the string-valued `field` into the closest non-string, non-IP type. For example, a field whose value is "true" will be converted to its respective boolean type: true. Do note that float takes precedence of double in auto. A value of "242.15" will "automatically" be converted to 242.15 of type `float`. If a provided field cannot be appropriately converted, the processor will still process successfully and leave the field value as-is. In such a case, `target_field` will be updated with the unconverted field value. \ No newline at end of file diff --git a/internal/elasticsearch/ingest/processor_csv_data_source.go b/internal/elasticsearch/ingest/processor_csv_data_source.go index 11a031ecc..c1bf7f1e2 100644 --- a/internal/elasticsearch/ingest/processor_csv_data_source.go +++ b/internal/elasticsearch/ingest/processor_csv_data_source.go @@ -5,6 +5,8 @@ import ( "encoding/json" "strings" + _ "embed" + "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -12,6 +14,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +//go:embed processor_csv_data_source.md +var csvDataSourceDescription string + func DataSourceProcessorCSV() *schema.Resource { processorSchema := map[string]*schema.Schema{ "id": { @@ -102,7 +107,7 @@ func DataSourceProcessorCSV() *schema.Resource { } return &schema.Resource{ - Description: "Extracts fields from CSV line out of a single text field within a document. Any empty field in CSV will be skipped. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/csv-processor.html", + Description: csvDataSourceDescription, ReadContext: dataSourceProcessorCSVRead, diff --git a/internal/elasticsearch/ingest/processor_csv_data_source.md b/internal/elasticsearch/ingest/processor_csv_data_source.md new file mode 100644 index 000000000..51be8c6c0 --- /dev/null +++ b/internal/elasticsearch/ingest/processor_csv_data_source.md @@ -0,0 +1,3 @@ +Helper data source which can be used to create the configuration for a CSV processor. 
This processor extracts fields from CSV line out of a single text field within a document. Any empty field in CSV will be skipped. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/csv-processor.html + +If the `trim` option is enabled then any whitespace in the beginning and in the end of each unquoted field will be trimmed. For example with configuration above, a value of A, B will result in field field2 having value {nbsp}B (with space at the beginning). If trim is enabled A, B will result in field field2 having value B (no whitespace). Quoted fields will be left untouched. diff --git a/internal/elasticsearch/ingest/processor_date_data_source.go b/internal/elasticsearch/ingest/processor_date_data_source.go index d818db057..f69bed126 100644 --- a/internal/elasticsearch/ingest/processor_date_data_source.go +++ b/internal/elasticsearch/ingest/processor_date_data_source.go @@ -2,6 +2,7 @@ package ingest import ( "context" + _ "embed" "encoding/json" "strings" @@ -12,6 +13,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +//go:embed processor_date_data_source.md +var dateDataSourceDescription string + func DataSourceProcessorDate() *schema.Resource { processorSchema := map[string]*schema.Schema{ "id": { @@ -97,8 +101,7 @@ func DataSourceProcessorDate() *schema.Resource { } return &schema.Resource{ - Description: "Parses dates from fields, and then uses the date or timestamp as the timestamp for the document. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-processor.html", - + Description: dateDataSourceDescription, ReadContext: dataSourceProcessorDateRead, Schema: processorSchema, diff --git a/internal/elasticsearch/ingest/processor_date_data_source.md b/internal/elasticsearch/ingest/processor_date_data_source.md new file mode 100644 index 000000000..cb9280ff6 --- /dev/null +++ b/internal/elasticsearch/ingest/processor_date_data_source.md @@ -0,0 +1,3 @@ +Helper data source which can be used to create the configuration for a date processor. This processor parses dates from fields, and then uses the date or timestamp as the timestamp for the document. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-processor.html + +By default, the date processor adds the parsed date as a new field called `@timestamp`. You can specify a different field by setting the `target_field` configuration parameter. Multiple date formats are supported as part of the same date processor definition. They will be used sequentially to attempt parsing the date field, in the same order they were defined as part of the processor definition. 
diff --git a/internal/elasticsearch/ingest/processor_date_index_name_data_source.go b/internal/elasticsearch/ingest/processor_date_index_name_data_source.go index 6d0f02f75..416119385 100644 --- a/internal/elasticsearch/ingest/processor_date_index_name_data_source.go +++ b/internal/elasticsearch/ingest/processor_date_index_name_data_source.go @@ -2,6 +2,7 @@ package ingest import ( "context" + _ "embed" "encoding/json" "strings" @@ -12,6 +13,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +//go:embed processor_date_index_name_data_source.md +var dateIndexNameDataSourceDescription string + func DataSourceProcessorDateIndexName() *schema.Resource { processorSchema := map[string]*schema.Schema{ "id": { @@ -101,7 +105,7 @@ func DataSourceProcessorDateIndexName() *schema.Resource { } return &schema.Resource{ - Description: "The purpose of this processor is to point documents to the right time based index based on a date or timestamp field in a document by using the date math index name support. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-index-name-processor.html", + Description: dateIndexNameDataSourceDescription, ReadContext: dataSourceProcessorDateIndexNameRead, diff --git a/internal/elasticsearch/ingest/processor_date_index_name_data_source.md b/internal/elasticsearch/ingest/processor_date_index_name_data_source.md new file mode 100644 index 000000000..0e47c3c88 --- /dev/null +++ b/internal/elasticsearch/ingest/processor_date_index_name_data_source.md @@ -0,0 +1,5 @@ +Helper data source which can be used to create the configuration for a date index name processor. The purpose of this processor is to point documents to the right time based index based on a date or timestamp field in a document by using the date math index name support. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-index-name-processor.html + +The processor sets the _index metadata field with a date math index name expression based on the provided index name prefix, a date or timestamp field in the documents being processed and the provided date rounding. + +First, this processor fetches the date or timestamp from a field in the document being processed. Optionally, date formatting can be configured on how the field’s value should be parsed into a date. Then this date, the provided index name prefix and the provided date rounding get formatted into a date math index name expression. Also here optionally date formatting can be specified on how the date should be formatted into a date math index name expression. diff --git a/internal/elasticsearch/ingest/processor_dissect_data_source.go b/internal/elasticsearch/ingest/processor_dissect_data_source.go index 290241563..726cec2ed 100644 --- a/internal/elasticsearch/ingest/processor_dissect_data_source.go +++ b/internal/elasticsearch/ingest/processor_dissect_data_source.go @@ -2,6 +2,7 @@ package ingest import ( "context" + _ "embed" "encoding/json" "strings" @@ -12,6 +13,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +//go:embed processor_dissect_data_source.md +var dissectDataSourceDescription string + func DataSourceProcessorDissect() *schema.Resource { processorSchema := map[string]*schema.Schema{ "id": { @@ -81,7 +85,7 @@ func DataSourceProcessorDissect() *schema.Resource { } return &schema.Resource{ - Description: "Extracts structured fields out of a single text field within a document. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/dissect-processor.html#dissect-processor", + Description: dissectDataSourceDescription, ReadContext: dataSourceProcessorDissectRead, diff --git a/internal/elasticsearch/ingest/processor_dissect_data_source.md b/internal/elasticsearch/ingest/processor_dissect_data_source.md new file mode 100644 index 000000000..af7ec699b --- /dev/null +++ b/internal/elasticsearch/ingest/processor_dissect_data_source.md @@ -0,0 +1,5 @@ +Helper data source which can be used to create the configuration for a dissect processor. This processor extracts structured fields out of a single text field within a document. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/dissect-processor.html#dissect-processor + +Similar to the Grok Processor, dissect also extracts structured fields out of a single text field within a document. However unlike the Grok Processor, dissect does not use Regular Expressions. This allows dissect’s syntax to be simple and for some cases faster than the Grok Processor. + +Dissect matches a single text field against a defined pattern. \ No newline at end of file diff --git a/internal/elasticsearch/ingest/processor_dissect_dot_expander_source.go b/internal/elasticsearch/ingest/processor_dissect_dot_expander_source.go index 39d3d4209..b77e0573b 100644 --- a/internal/elasticsearch/ingest/processor_dissect_dot_expander_source.go +++ b/internal/elasticsearch/ingest/processor_dissect_dot_expander_source.go @@ -75,7 +75,7 @@ func DataSourceProcessorDotExpander() *schema.Resource { } return &schema.Resource{ - Description: "Expands a field with dots into an object field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/dot-expand-processor.html", + Description: "Helper data source which can be used to create the configuration for a dot expander processor. This processor expands a field with dots into an object field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/dot-expand-processor.html", ReadContext: dataSourceProcessorDotExpanderRead, diff --git a/internal/elasticsearch/ingest/processor_drop_data_source.go b/internal/elasticsearch/ingest/processor_drop_data_source.go index f9bffb4e3..e54dca00b 100644 --- a/internal/elasticsearch/ingest/processor_drop_data_source.go +++ b/internal/elasticsearch/ingest/processor_drop_data_source.go @@ -59,7 +59,7 @@ func DataSourceProcessorDrop() *schema.Resource { } return &schema.Resource{ - Description: "Drops the document without raising any errors. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/drop-processor.html", + Description: "Helper data source which can be used to create the configuration for a drop processor. This processor drops the document without raising any errors. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/drop-processor.html", ReadContext: dataSourceProcessorDropRead, diff --git a/internal/elasticsearch/ingest/processor_enrich_data_source.go b/internal/elasticsearch/ingest/processor_enrich_data_source.go index 36408d93c..d021b7e72 100644 --- a/internal/elasticsearch/ingest/processor_enrich_data_source.go +++ b/internal/elasticsearch/ingest/processor_enrich_data_source.go @@ -97,7 +97,7 @@ func DataSourceProcessorEnrich() *schema.Resource { } return &schema.Resource{ - Description: "The enrich processor can enrich documents with data from another index. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-processor.html", + Description: "Helper data source which can be used to create the configuration for an enrich processor. The enrich processor can enrich documents with data from another index. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-processor.html", ReadContext: dataSourceProcessorEnrichRead, diff --git a/internal/elasticsearch/ingest/processor_fail_data_source.go b/internal/elasticsearch/ingest/processor_fail_data_source.go index 60f1933c7..be53db7ea 100644 --- a/internal/elasticsearch/ingest/processor_fail_data_source.go +++ b/internal/elasticsearch/ingest/processor_fail_data_source.go @@ -64,7 +64,7 @@ func DataSourceProcessorFail() *schema.Resource { } return &schema.Resource{ - Description: "Raises an exception. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fail-processor.html", + Description: "Helper data source which can be used to create the configuration for a fail processor. This processor raises an exception. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fail-processor.html", ReadContext: dataSourceProcessorFailRead, diff --git a/internal/elasticsearch/ingest/processor_fingerprint_data_source.go b/internal/elasticsearch/ingest/processor_fingerprint_data_source.go index fa4457f6d..9f7ec0184 100644 --- a/internal/elasticsearch/ingest/processor_fingerprint_data_source.go +++ b/internal/elasticsearch/ingest/processor_fingerprint_data_source.go @@ -92,7 +92,7 @@ func DataSourceProcessorFingerprint() *schema.Resource { } return &schema.Resource{ - Description: "Computes a hash of the document’s content. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fingerprint-processor.html", + Description: "Helper data source which can be used to create the configuration for a fingerprint processor. This processor computes a hash of the document’s content. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fingerprint-processor.html", ReadContext: dataSourceProcessorFingerprintRead, diff --git a/internal/elasticsearch/ingest/processor_foreach_data_source.go b/internal/elasticsearch/ingest/processor_foreach_data_source.go index 194438424..c1d0e986b 100644 --- a/internal/elasticsearch/ingest/processor_foreach_data_source.go +++ b/internal/elasticsearch/ingest/processor_foreach_data_source.go @@ -2,6 +2,7 @@ package ingest import ( "context" + _ "embed" "encoding/json" "strings" @@ -12,6 +13,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +//go:embed processor_foreach_data_source.md +var foreachDataSourceDescription string + func DataSourceProcessorForeach() *schema.Resource { processorSchema := map[string]*schema.Schema{ "id": { @@ -77,7 +81,7 @@ func DataSourceProcessorForeach() *schema.Resource { } return &schema.Resource{ - Description: "Runs an ingest processor on each element of an array or object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/foreach-processor.html", + Description: foreachDataSourceDescription, ReadContext: dataSourceProcessorForeachRead, diff --git a/internal/elasticsearch/ingest/processor_foreach_data_source.md b/internal/elasticsearch/ingest/processor_foreach_data_source.md new file mode 100644 index 000000000..a0493cdaa --- /dev/null +++ b/internal/elasticsearch/ingest/processor_foreach_data_source.md @@ -0,0 +1,13 @@ +Helper data source which can be used to create the configuration for a foreach processor. 
This processor runs an ingest processor on each element of an array or object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/foreach-processor.html + +All ingest processors can run on array or object elements. However, if the number of elements is unknown, it can be cumbersome to process each one in the same way. + +The `foreach` processor lets you specify a `field` containing array or object values and a `processor` to run on each element in the field. + +### Access keys and values + +When iterating through an array or object, the foreach processor stores the current element’s value in the `_ingest._value` ingest metadata field. `_ingest._value` contains the entire element value, including any child fields. You can access child field values using dot notation on the `_ingest._value` field. + +When iterating through an object, the foreach processor also stores the current element’s key as a string in `_ingest._key`. + +You can access and change `_ingest._key` and `_ingest._value` in the processor. \ No newline at end of file diff --git a/internal/elasticsearch/ingest/processor_geoip_data_source.go b/internal/elasticsearch/ingest/processor_geoip_data_source.go index f0f3ee1b7..bf302976f 100644 --- a/internal/elasticsearch/ingest/processor_geoip_data_source.go +++ b/internal/elasticsearch/ingest/processor_geoip_data_source.go @@ -2,6 +2,7 @@ package ingest import ( "context" + _ "embed" "encoding/json" "github.com/elastic/terraform-provider-elasticstack/internal/models" @@ -10,6 +11,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) +//go:embed processor_geoip_data_source.md +var geoipDataSourceDescription string + func DataSourceProcessorGeoip() *schema.Resource { processorSchema := map[string]*schema.Schema{ "id": { @@ -61,7 +65,7 @@ func DataSourceProcessorGeoip() *schema.Resource { } return &schema.Resource{ - Description: "The geoip processor adds information about the geographical location of an IPv4 or IPv6 address. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html", + Description: geoipDataSourceDescription, ReadContext: dataSourceProcessorGeoipRead, diff --git a/internal/elasticsearch/ingest/processor_geoip_data_source.md b/internal/elasticsearch/ingest/processor_geoip_data_source.md new file mode 100644 index 000000000..1aa39cb5e --- /dev/null +++ b/internal/elasticsearch/ingest/processor_geoip_data_source.md @@ -0,0 +1,7 @@ +Helper data source which can be used to create the configuration for a geoip processor. The geoip processor adds information about the geographical location of an IPv4 or IPv6 address. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html + +By default, the processor uses the GeoLite2 City, GeoLite2 Country, and GeoLite2 ASN GeoIP2 databases from MaxMind, shared under the CC BY-SA 4.0 license. Elasticsearch automatically downloads updates for these databases from the Elastic GeoIP endpoint: https://geoip.elastic.co/v1/database. To get download statistics for these updates, use the GeoIP stats API. + +If your cluster can’t connect to the Elastic GeoIP endpoint or you want to manage your own updates, [see Manage your own GeoIP2 database updates](https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html#manage-geoip-database-updates). + +If Elasticsearch can’t connect to the endpoint for 30 days all updated databases will become invalid. 
Elasticsearch will stop enriching documents with geoip data and will add `tags: ["_geoip_expired_database"]` field instead. diff --git a/internal/elasticsearch/ingest/processor_grok_data_source.go b/internal/elasticsearch/ingest/processor_grok_data_source.go index e6f151676..de60803e8 100644 --- a/internal/elasticsearch/ingest/processor_grok_data_source.go +++ b/internal/elasticsearch/ingest/processor_grok_data_source.go @@ -2,6 +2,7 @@ package ingest import ( "context" + _ "embed" "encoding/json" "strings" @@ -12,6 +13,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +//go:embed processor_grok_data_source.md +var grokDataSourceDescription string + func DataSourceProcessorGrok() *schema.Resource { processorSchema := map[string]*schema.Schema{ "id": { @@ -99,7 +103,7 @@ func DataSourceProcessorGrok() *schema.Resource { } return &schema.Resource{ - Description: "Extracts structured fields out of a single text field within a document. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/grok-processor.html", + Description: grokDataSourceDescription, ReadContext: dataSourceProcessorGrokRead, diff --git a/internal/elasticsearch/ingest/processor_grok_data_source.md b/internal/elasticsearch/ingest/processor_grok_data_source.md new file mode 100644 index 000000000..5ce77e195 --- /dev/null +++ b/internal/elasticsearch/ingest/processor_grok_data_source.md @@ -0,0 +1,5 @@ +Helper data source which can be used to create the configuration for a grok processor. This processor extracts structured fields out of a single text field within a document. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/grok-processor.html + +This processor comes packaged with many [reusable patterns](https://github.com/elastic/elasticsearch/blob/master/libs/grok/src/main/resources/patterns). + +If you need help building patterns to match your logs, you will find the [Grok Debugger](https://www.elastic.co/guide/en/kibana/master/xpack-grokdebugger.html) tool quite useful! [The Grok Constructor](https://grokconstructor.appspot.com/) is also a useful tool. \ No newline at end of file diff --git a/internal/elasticsearch/ingest/processor_gsub_data_source.go b/internal/elasticsearch/ingest/processor_gsub_data_source.go index 75fe285dc..45933f138 100644 --- a/internal/elasticsearch/ingest/processor_gsub_data_source.go +++ b/internal/elasticsearch/ingest/processor_gsub_data_source.go @@ -85,7 +85,7 @@ func DataSourceProcessorGsub() *schema.Resource { } return &schema.Resource{ - Description: "Converts a string field by applying a regular expression and a replacement. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/gsub-processor.html", + Description: "Helper data source which can be used to create the configuration for a gsub processor. This processor converts a string field by applying a regular expression and a replacement. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/gsub-processor.html", ReadContext: dataSourceProcessorGsubRead, diff --git a/internal/elasticsearch/ingest/processor_html_strip_data_source.go b/internal/elasticsearch/ingest/processor_html_strip_data_source.go index b4b35ad0b..0b167025c 100644 --- a/internal/elasticsearch/ingest/processor_html_strip_data_source.go +++ b/internal/elasticsearch/ingest/processor_html_strip_data_source.go @@ -75,7 +75,7 @@ func DataSourceProcessorHtmlStrip() *schema.Resource { } return &schema.Resource{ - Description: "Removes HTML tags from the field. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/htmlstrip-processor.html", + Description: "Helper data source which can be used to create the configuration for an HTML strip processor. This processor removes HTML tags from the field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/htmlstrip-processor.html", ReadContext: dataSourceProcessorHtmlStripRead, diff --git a/internal/elasticsearch/ingest/processor_join_data_source.go b/internal/elasticsearch/ingest/processor_join_data_source.go index 2ceee356f..9f9f34ea5 100644 --- a/internal/elasticsearch/ingest/processor_join_data_source.go +++ b/internal/elasticsearch/ingest/processor_join_data_source.go @@ -74,7 +74,7 @@ func DataSourceProcessorJoin() *schema.Resource { } return &schema.Resource{ - Description: "Joins each element of an array into a single string using a separator character between each element. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/join-processor.html", + Description: "Helper data source which can be used to create the configuration for a join processor. This processor joins each element of an array into a single string using a separator character between each element. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/join-processor.html", ReadContext: dataSourceProcessorJoinRead, diff --git a/internal/elasticsearch/ingest/processor_json_data_source.go b/internal/elasticsearch/ingest/processor_json_data_source.go index a36856503..e2bddc5ee 100644 --- a/internal/elasticsearch/ingest/processor_json_data_source.go +++ b/internal/elasticsearch/ingest/processor_json_data_source.go @@ -86,7 +86,7 @@ func DataSourceProcessorJson() *schema.Resource { } return &schema.Resource{ - Description: "Converts a JSON string into a structured JSON object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/json-processor.html", + Description: "Helper data source which can be used to create the configuration for a JSON processor. This processor converts a JSON string into a structured JSON object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/json-processor.html", ReadContext: dataSourceProcessorJsonRead, diff --git a/internal/elasticsearch/ingest/processor_kv_data_source.go b/internal/elasticsearch/ingest/processor_kv_data_source.go index 574eb6f6f..cf81a7db3 100644 --- a/internal/elasticsearch/ingest/processor_kv_data_source.go +++ b/internal/elasticsearch/ingest/processor_kv_data_source.go @@ -124,7 +124,7 @@ func DataSourceProcessorKV() *schema.Resource { } return &schema.Resource{ - Description: "This processor helps automatically parse messages (or specific event fields) which are of the foo=bar variety. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/kv-processor.html", + Description: "Helper data source which can be used to create the configuration for a KV processor. This processor helps automatically parse messages (or specific event fields) which are of the foo=bar variety. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/kv-processor.html", ReadContext: dataSourceProcessorKVRead, diff --git a/internal/elasticsearch/ingest/processor_lowercase_data_source.go b/internal/elasticsearch/ingest/processor_lowercase_data_source.go index 3d4b620f1..5a950deb7 100644 --- a/internal/elasticsearch/ingest/processor_lowercase_data_source.go +++ b/internal/elasticsearch/ingest/processor_lowercase_data_source.go @@ -75,7 +75,7 @@ func DataSourceProcessorLowercase() *schema.Resource { } return &schema.Resource{ - Description: "Converts a string to its lowercase equivalent. If the field is an array of strings, all members of the array will be converted. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/lowercase-processor.html", + Description: "Helper data source which can be used to create the configuration for a lowercase processor. This processor converts a string to its lowercase equivalent. If the field is an array of strings, all members of the array will be converted. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/lowercase-processor.html", ReadContext: dataSourceProcessorLowercaseRead, diff --git a/internal/elasticsearch/ingest/processor_network_direction_data_source.go b/internal/elasticsearch/ingest/processor_network_direction_data_source.go index 7c9cfaed4..fac66a3b0 100644 --- a/internal/elasticsearch/ingest/processor_network_direction_data_source.go +++ b/internal/elasticsearch/ingest/processor_network_direction_data_source.go @@ -5,6 +5,8 @@ import ( "encoding/json" "strings" + _ "embed" + "github.com/elastic/terraform-provider-elasticstack/internal/models" "github.com/elastic/terraform-provider-elasticstack/internal/utils" "github.com/hashicorp/terraform-plugin-sdk/v2/diag" @@ -12,6 +14,9 @@ import ( "github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) +//go:embed processor_network_direction_data_source.md +var networkDirectionDataSourceDescription string + func DataSourceProcessorNetworkDirection() *schema.Resource { processorSchema := map[string]*schema.Schema{ "id": { @@ -98,8 +103,7 @@ func DataSourceProcessorNetworkDirection() *schema.Resource { } return &schema.Resource{ - Description: "Calculates the network direction given a source IP address, destination IP address, and a list of internal networks. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/network-direction-processor.html", - + Description: networkDirectionDataSourceDescription, ReadContext: dataSourceProcessorNetworkDirectionRead, Schema: processorSchema, diff --git a/internal/elasticsearch/ingest/processor_network_direction_data_source.md b/internal/elasticsearch/ingest/processor_network_direction_data_source.md new file mode 100644 index 000000000..dae4f0936 --- /dev/null +++ b/internal/elasticsearch/ingest/processor_network_direction_data_source.md @@ -0,0 +1,19 @@ +Helper data source which can be used to create the configuration for a network direction processor. This processor calculates the network direction given a source IP address, destination IP address, and a list of internal networks. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/network-direction-processor.html + +The network direction processor reads IP addresses from Elastic Common Schema (ECS) fields by default. If you use the ECS, only the `internal_networks` option must be specified. + +One of either `internal_networks` or `internal_networks_field` must be specified. 
If `internal_networks_field` is specified, it follows the behavior specified by `ignore_missing`. + +### Supported named network ranges + +The named ranges supported for the internal_networks option are: + +* `loopback` - Matches loopback addresses in the range of 127.0.0.0/8 or ::1/128. +* `unicast` or `global_unicast` - Matches global unicast addresses defined in RFC 1122, RFC 4632, and RFC 4291 with the exception of the IPv4 broadcast address (255.255.255.255). This includes private address ranges. +* `multicast` - Matches multicast addresses. +* `interface_local_multicast` - Matches IPv6 interface-local multicast addresses. +* `link_local_unicast` - Matches link-local unicast addresses. +* `link_local_multicast` - Matches link-local multicast addresses. +* `private` - Matches private address ranges defined in RFC 1918 (IPv4) and RFC 4193 (IPv6). +* `public` - Matches addresses that are not loopback, unspecified, IPv4 broadcast, link local unicast, link local multicast, interface local multicast, or private. +* `unspecified` - Matches unspecified addresses (either the IPv4 address "0.0.0.0" or the IPv6 address "::"). \ No newline at end of file diff --git a/internal/elasticsearch/ingest/processor_pipeline_data_source.go b/internal/elasticsearch/ingest/processor_pipeline_data_source.go index f96b4a1d4..a315abed8 100644 --- a/internal/elasticsearch/ingest/processor_pipeline_data_source.go +++ b/internal/elasticsearch/ingest/processor_pipeline_data_source.go @@ -64,7 +64,7 @@ func DataSourceProcessorPipeline() *schema.Resource { } return &schema.Resource{ - Description: "Executes another pipeline. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/pipeline-processor.html", + Description: "Helper data source which can be used to create the configuration for a pipeline processor. This processor executes another pipeline. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/pipeline-processor.html", ReadContext: dataSourceProcessorPipelineRead, diff --git a/internal/elasticsearch/ingest/processor_registered_domain_data_source.go b/internal/elasticsearch/ingest/processor_registered_domain_data_source.go index 21f7760f3..7ba192647 100644 --- a/internal/elasticsearch/ingest/processor_registered_domain_data_source.go +++ b/internal/elasticsearch/ingest/processor_registered_domain_data_source.go @@ -75,7 +75,7 @@ func DataSourceProcessorRegisteredDomain() *schema.Resource { } return &schema.Resource{ - Description: "Extracts the registered domain (also known as the effective top-level domain or eTLD), sub-domain, and top-level domain from a fully qualified domain name (FQDN). See: https://www.elastic.co/guide/en/elasticsearch/reference/current/registered-domain-processor.html", + Description: "Helper data source which can be used to create the configuration for a registered domain processor. This processor extracts the registered domain (also known as the effective top-level domain or eTLD), sub-domain, and top-level domain from a fully qualified domain name (FQDN). 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/registered-domain-processor.html", ReadContext: dataSourceProcessorRegisteredDomainRead, diff --git a/internal/elasticsearch/ingest/processor_remove_data_source.go b/internal/elasticsearch/ingest/processor_remove_data_source.go index 00842809a..4a261b603 100644 --- a/internal/elasticsearch/ingest/processor_remove_data_source.go +++ b/internal/elasticsearch/ingest/processor_remove_data_source.go @@ -74,7 +74,7 @@ func DataSourceProcessorRemove() *schema.Resource { } return &schema.Resource{ - Description: "Removes existing fields. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/remove-processor.html", + Description: "Helper data source which can be used to create the configuration for a remove processor. This processor removes existing fields. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/remove-processor.html", ReadContext: dataSourceProcessorRemoveRead, diff --git a/internal/elasticsearch/ingest/processor_rename_data_source.go b/internal/elasticsearch/ingest/processor_rename_data_source.go index a18862b87..2f9a64cc9 100644 --- a/internal/elasticsearch/ingest/processor_rename_data_source.go +++ b/internal/elasticsearch/ingest/processor_rename_data_source.go @@ -75,7 +75,7 @@ func DataSourceProcessorRename() *schema.Resource { } return &schema.Resource{ - Description: "Renames an existing field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/rename-processor.html", + Description: "Helper data source which can be used to create the configuration for a rename processor. This processor renames an existing field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/rename-processor.html", ReadContext: dataSourceProcessorRenameRead, diff --git a/internal/elasticsearch/ingest/processor_reroute_data_source.go b/internal/elasticsearch/ingest/processor_reroute_data_source.go index a416fad34..057a245db 100644 --- a/internal/elasticsearch/ingest/processor_reroute_data_source.go +++ b/internal/elasticsearch/ingest/processor_reroute_data_source.go @@ -74,7 +74,7 @@ func DataSourceProcessorReroute() *schema.Resource { } return &schema.Resource{ - Description: "Reroutes a document to a different data stream, index, or index alias. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/reroute-processor.html", + Description: "Helper data source which can be used to create the configuration for a reroute processor. This processor reroutes a document to a different data stream, index, or index alias. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/reroute-processor.html", ReadContext: dataSourceProcessorRerouteRead, diff --git a/internal/elasticsearch/ingest/processor_script_data_source.go b/internal/elasticsearch/ingest/processor_script_data_source.go index 1d4a3fc0a..5ff75c2f8 100644 --- a/internal/elasticsearch/ingest/processor_script_data_source.go +++ b/internal/elasticsearch/ingest/processor_script_data_source.go @@ -85,7 +85,7 @@ func DataSourceProcessorScript() *schema.Resource { } return &schema.Resource{ - Description: "Runs an inline or stored script on incoming documents. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/script-processor.html", + Description: "Helper data source which can be used to create the configuration for a script processor. This processor runs an inline or stored script on incoming documents. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/script-processor.html", ReadContext: dataSourceProcessorScriptRead, diff --git a/internal/elasticsearch/ingest/processor_script_data_source.md b/internal/elasticsearch/ingest/processor_script_data_source.md new file mode 100644 index 000000000..811568cf1 --- /dev/null +++ b/internal/elasticsearch/ingest/processor_script_data_source.md @@ -0,0 +1,11 @@ +Helper data source which can be used to create the configuration for a script processor. This processor runs an inline or stored script on incoming documents. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/script-processor.html + +The script processor uses the script cache to avoid recompiling the script for each incoming document. To improve performance, ensure the script cache is properly sized before using a script processor in production. + +### Access source fields + +The script processor parses each incoming document’s JSON source fields into a set of maps, lists, and primitives. To access these fields with a Painless script, use the map access operator: `ctx['my-field']`. You can also use the shorthand `ctx.` syntax. + +### Access metadata fields + +You can also use a script processor to access metadata fields. \ No newline at end of file diff --git a/internal/elasticsearch/ingest/processor_set_data_source.go b/internal/elasticsearch/ingest/processor_set_data_source.go index 6c866f1d0..b26e3bacd 100644 --- a/internal/elasticsearch/ingest/processor_set_data_source.go +++ b/internal/elasticsearch/ingest/processor_set_data_source.go @@ -96,7 +96,7 @@ func DataSourceProcessorSet() *schema.Resource { } return &schema.Resource{ - Description: "Sets one field and associates it with the specified value. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/set-processor.html", + Description: "Helper data source which can be used to create the configuration for a set processor. This processor sets one field and associates it with the specified value. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/set-processor.html", ReadContext: dataSourceProcessorSetRead, diff --git a/internal/elasticsearch/ingest/processor_set_security_user_data_source.go b/internal/elasticsearch/ingest/processor_set_security_user_data_source.go index 511f41497..d2f3b63be 100644 --- a/internal/elasticsearch/ingest/processor_set_security_user_data_source.go +++ b/internal/elasticsearch/ingest/processor_set_security_user_data_source.go @@ -73,7 +73,7 @@ func DataSourceProcessorSetSecurityUser() *schema.Resource { } return &schema.Resource{ - Description: "Sets user-related details (such as username, roles, email, full_name, metadata, api_key, realm and authentication_type) from the current authenticated user to the current document by pre-processing the ingest. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-node-set-security-user-processor.html", + Description: "Helper data source which can be used to create the configuration for a set security user processor. This processor sets user-related details (such as username, roles, email, full_name, metadata, api_key, realm and authentication_type) from the current authenticated user to the current document by pre-processing the ingest. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-node-set-security-user-processor.html", ReadContext: dataSourceProcessorSetSecurityUserRead, diff --git a/internal/elasticsearch/ingest/processor_set_security_user_data_source.md b/internal/elasticsearch/ingest/processor_set_security_user_data_source.md new file mode 100644 index 000000000..b95143d1b --- /dev/null +++ b/internal/elasticsearch/ingest/processor_set_security_user_data_source.md @@ -0,0 +1,8 @@ +Helper data source which can be used to create the configuration for a set security user processor. This processor sets user-related details (such as `username`, `roles`, `email`, `full_name`, `metadata`, `api_key`, `realm` and `authentication_type`) from the current authenticated user to the current document by pre-processing the ingest. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-node-set-security-user-processor.html + +The `api_key` property exists only if the user authenticates with an API key. It is an object containing the `id`, `name` and `metadata` (if it exists and is non-empty) fields of the API key. + +The `realm` property is also an object with two fields, name and type. When using API key authentication, the realm property refers to the realm from which the API key is created. + +The `authentication_type` property is a string that can take value from `REALM`, `API_KEY`, `TOKEN` and `ANONYMOUS`. + diff --git a/internal/elasticsearch/ingest/processor_sort_data_source.go b/internal/elasticsearch/ingest/processor_sort_data_source.go index 078d6c238..6f3a7cb4c 100644 --- a/internal/elasticsearch/ingest/processor_sort_data_source.go +++ b/internal/elasticsearch/ingest/processor_sort_data_source.go @@ -76,7 +76,7 @@ func DataSourceProcessorSort() *schema.Resource { } return &schema.Resource{ - Description: "Sorts the elements of an array ascending or descending. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/sort-processor.html", + Description: "Helper data source which can be used to create the configuration for a sort processor. This processor sorts the elements of an array ascending or descending. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/sort-processor.html", ReadContext: dataSourceProcessorSortRead, diff --git a/internal/elasticsearch/ingest/processor_split_data_source.go b/internal/elasticsearch/ingest/processor_split_data_source.go index be90f0f43..2637004f2 100644 --- a/internal/elasticsearch/ingest/processor_split_data_source.go +++ b/internal/elasticsearch/ingest/processor_split_data_source.go @@ -86,7 +86,7 @@ func DataSourceProcessorSplit() *schema.Resource { } return &schema.Resource{ - Description: "Splits a field into an array using a separator character. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/split-processor.html", + Description: "Helper data source which can be used to create the configuration for a split processor. This processor splits a field into an array using a separator character. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/split-processor.html", ReadContext: dataSourceProcessorSplitRead, diff --git a/internal/elasticsearch/ingest/processor_trim_data_source.go b/internal/elasticsearch/ingest/processor_trim_data_source.go index e45eb6572..53208920c 100644 --- a/internal/elasticsearch/ingest/processor_trim_data_source.go +++ b/internal/elasticsearch/ingest/processor_trim_data_source.go @@ -75,7 +75,7 @@ func DataSourceProcessorTrim() *schema.Resource { } return &schema.Resource{ - Description: "Trims whitespace from field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/trim-processor.html", + Description: "Helper data source which can be used to create the configuration for a trim processor. This processor trims whitespace from field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/trim-processor.html", ReadContext: dataSourceProcessorTrimRead, diff --git a/internal/elasticsearch/ingest/processor_uppercase_data_source.go b/internal/elasticsearch/ingest/processor_uppercase_data_source.go index ef9d1cfbd..3e258b85e 100644 --- a/internal/elasticsearch/ingest/processor_uppercase_data_source.go +++ b/internal/elasticsearch/ingest/processor_uppercase_data_source.go @@ -75,7 +75,7 @@ func DataSourceProcessorUppercase() *schema.Resource { } return &schema.Resource{ - Description: "Converts a string to its uppercase equivalent. If the field is an array of strings, all members of the array will be converted. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uppercase-processor.html", + Description: "Helper data source which can be used to create the configuration for an uppercase processor. This processor converts a string to its uppercase equivalent. If the field is an array of strings, all members of the array will be converted. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uppercase-processor.html", ReadContext: dataSourceProcessorUppercaseRead, diff --git a/internal/elasticsearch/ingest/processor_uri_parts_data_source.go b/internal/elasticsearch/ingest/processor_uri_parts_data_source.go index 9417dc826..19d65d9fd 100644 --- a/internal/elasticsearch/ingest/processor_uri_parts_data_source.go +++ b/internal/elasticsearch/ingest/processor_uri_parts_data_source.go @@ -81,7 +81,7 @@ func DataSourceProcessorUriParts() *schema.Resource { } return &schema.Resource{ - Description: "Parses a Uniform Resource Identifier (URI) string and extracts its components as an object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uri-parts-processor.html", + Description: "Helper data source which can be used to create the configuration for a URI parts processor. This processor parses a Uniform Resource Identifier (URI) string and extracts its components as an object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uri-parts-processor.html", ReadContext: dataSourceProcessorUriPartsRead, diff --git a/internal/elasticsearch/ingest/processor_urldecode_data_source.go b/internal/elasticsearch/ingest/processor_urldecode_data_source.go index a8beb0032..0f3d3c6c0 100644 --- a/internal/elasticsearch/ingest/processor_urldecode_data_source.go +++ b/internal/elasticsearch/ingest/processor_urldecode_data_source.go @@ -75,7 +75,7 @@ func DataSourceProcessorUrldecode() *schema.Resource { } return &schema.Resource{ - Description: "URL-decodes a string. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/urldecode-processor.html", + Description: "Helper data source which can be used to create the configuration for a URL-decode processor. This processor URL-decodes a string. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/urldecode-processor.html", ReadContext: dataSourceProcessorUrldecodeRead, diff --git a/internal/elasticsearch/ingest/processor_user_agent_data_source.go b/internal/elasticsearch/ingest/processor_user_agent_data_source.go index 6ee23ec3c..529fd7fe3 100644 --- a/internal/elasticsearch/ingest/processor_user_agent_data_source.go +++ b/internal/elasticsearch/ingest/processor_user_agent_data_source.go @@ -61,7 +61,7 @@ func DataSourceProcessorUserAgent() *schema.Resource { } return &schema.Resource{ - Description: "Extracts details from the user agent string a browser sends with its web requests. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/user-agent-processor.html", + Description: "Helper data source which can be used to create the configuration for a user agent processor. This processor extracts details from the user agent string a browser sends with its web requests. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/user-agent-processor.html", ReadContext: dataSourceProcessorUserAgentRead, diff --git a/internal/elasticsearch/security/system_user/resource-description.md b/internal/elasticsearch/security/system_user/resource-description.md new file mode 100644 index 000000000..ec90ae398 --- /dev/null +++ b/internal/elasticsearch/security/system_user/resource-description.md @@ -0,0 +1,3 @@ +Updates a system user's password and enablement. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/built-in-users.html + +Since this resource manages built-in users, destroying it will not delete the underlying Elasticsearch user; it will only be removed from the Terraform state. diff --git a/internal/elasticsearch/security/system_user/schema.go b/internal/elasticsearch/security/system_user/schema.go index 6b529eceb..10e683185 100644 --- a/internal/elasticsearch/security/system_user/schema.go +++ b/internal/elasticsearch/security/system_user/schema.go @@ -2,6 +2,7 @@ package system_user import ( "context" + _ "embed" "regexp" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" @@ -16,13 +17,16 @@ import ( providerschema "github.com/elastic/terraform-provider-elasticstack/internal/schema" ) +//go:embed resource-description.md +var systemUserResourceDescription string + func (r *systemUserResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { resp.Schema = GetSchema() } func GetSchema() schema.Schema { return schema.Schema{ - MarkdownDescription: "Updates system user's password and enablement. 
See, https://www.elastic.co/guide/en/elasticsearch/reference/current/built-in-users.html", + MarkdownDescription: systemUserResourceDescription, Blocks: map[string]schema.Block{ "elasticsearch_connection": providerschema.GetEsFWConnectionBlock("elasticsearch_connection", false), }, diff --git a/internal/elasticsearch/transform/transform.go b/internal/elasticsearch/transform/transform.go index 128509c63..660cc4c3a 100644 --- a/internal/elasticsearch/transform/transform.go +++ b/internal/elasticsearch/transform/transform.go @@ -2,6 +2,7 @@ package transform import ( "context" + _ "embed" "encoding/json" "fmt" "regexp" @@ -303,7 +304,7 @@ func ResourceTransform() *schema.Resource { return &schema.Resource{ Schema: transformSchema, - Description: "Manages Elasticsearch transforms. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/transforms.html", + Description: transformDescription, CreateContext: resourceTransformCreate, ReadContext: resourceTransformRead, @@ -877,3 +878,6 @@ func isSettingAllowed(ctx context.Context, settingName string, serverVersion *ve return true } + +//go:embed transform.md +var transformDescription string diff --git a/internal/elasticsearch/transform/transform.md b/internal/elasticsearch/transform/transform.md new file mode 100644 index 000000000..e699af31f --- /dev/null +++ b/internal/elasticsearch/transform/transform.md @@ -0,0 +1,3 @@ +Manages Elasticsearch transforms. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/transforms.html + +**NOTE:** Some transform settings require a minimum Elasticsearch version. Such settings will be ignored when applied to versions below the required one (a warning will be issued in the logs). diff --git a/internal/fleet/integration/schema.go b/internal/fleet/integration/schema.go index 17ef3faf8..0f9e24cf0 100644 --- a/internal/fleet/integration/schema.go +++ b/internal/fleet/integration/schema.go @@ -10,7 +10,12 @@ import ( ) func (r *integrationResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema.Description = "Manage installation of a Fleet integration package." + resp.Schema.Description = `Installs or uninstalls a Fleet integration package. The Kibana Fleet UI can be +used to view available packages. Additional information for managing integration +packages can be found [here](https://www.elastic.co/guide/en/fleet/current/install-uninstall-integration-assets.html). + +To prevent the package from being uninstalled when the resource is destroyed, +set ` + "`skip_destroy` to `true`." resp.Schema.Attributes = map[string]schema.Attribute{ "id": schema.StringAttribute{ Description: "The ID of this resource.", diff --git a/internal/fleet/integration_ds/schema.go b/internal/fleet/integration_ds/schema.go index 8ab07a904..f2305638d 100644 --- a/internal/fleet/integration_ds/schema.go +++ b/internal/fleet/integration_ds/schema.go @@ -8,7 +8,16 @@ import ( ) func (d *integrationDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { - resp.Schema.Description = "Retrieves the latest version of an integration package in Fleet." + resp.Schema.Description = `This data source provides information about a Fleet integration package. Currently, +the data source will retrieve the latest available version of the package. Version +selection is determined by the Fleet API, which is currently based on semantic +versioning. + +By default, the highest GA release version will be selected. 
If a +package is not GA (the version is below 1.0.0) or if a new non-GA version of the +package is to be selected (i.e., the GA version of the package is 1.5.0, but there's +a new 1.5.1-beta version available), then the ` + "`prerelease`" + ` parameter in the plan +should be set to ` + "`true`." resp.Schema.Attributes = map[string]schema.Attribute{ "id": schema.StringAttribute{ Description: "The ID of this resource.", diff --git a/internal/fleet/integration_policy/resource-description.md b/internal/fleet/integration_policy/resource-description.md new file mode 100644 index 000000000..f809f4ea8 --- /dev/null +++ b/internal/fleet/integration_policy/resource-description.md @@ -0,0 +1,10 @@ +Creates or updates a Fleet Integration Policy. + +It is highly recommended that all inputs and streams are provided in the +Terraform plan, even if some are disabled. Otherwise, differences may appear +between what is in the plan versus what is returned by the Fleet API. + +The [Kibana Fleet UI](https://www.elastic.co/guide/en/fleet/current/add-integration-to-policy.html) +can be used as a reference for what data needs to be provided. Instead of saving +a new integration configuration, the API request can be previewed, showing what +values need to be provided for inputs and their streams. diff --git a/internal/fleet/integration_policy/schema.go b/internal/fleet/integration_policy/schema.go index 055a70614..903c25a09 100644 --- a/internal/fleet/integration_policy/schema.go +++ b/internal/fleet/integration_policy/schema.go @@ -2,6 +2,7 @@ package integration_policy import ( "context" + _ "embed" "github.com/hashicorp/terraform-plugin-framework-jsontypes/jsontypes" "github.com/hashicorp/terraform-plugin-framework/attr" @@ -12,6 +13,9 @@ import ( "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" ) +//go:embed resource-description.md +var integrationPolicyDescription string + func (r *integrationPolicyResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { resp.Schema = getSchemaV1() } @@ -19,7 +23,7 @@ func (r *integrationPolicyResource) Schema(ctx context.Context, req resource.Sch func getSchemaV1() schema.Schema { return schema.Schema{ Version: 1, - Description: "Creates a new Fleet Integration Policy. See https://www.elastic.co/guide/en/fleet/current/add-integration-to-policy.html", + Description: integrationPolicyDescription, Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ Description: "The ID of this resource.", diff --git a/internal/kibana/alerting.go b/internal/kibana/alerting.go index 9aadcbe70..85232dffc 100644 --- a/internal/kibana/alerting.go +++ b/internal/kibana/alerting.go @@ -2,6 +2,7 @@ package kibana import ( "context" + _ "embed" "encoding/json" "fmt" "strings" @@ -22,6 +23,9 @@ var frequencyMinSupportedVersion = version.Must(version.NewVersion("8.6.0")) var alertsFilterMinSupportedVersion = version.Must(version.NewVersion("8.9.0")) var alertDelayMinSupportedVersion = version.Must(version.NewVersion("8.13.0")) +//go:embed alerting.md +var resourceRuleDescription string + func ResourceAlertingRule() *schema.Resource { apikeySchema := map[string]*schema.Schema{ "rule_id": { @@ -224,8 +228,7 @@ func ResourceAlertingRule() *schema.Resource { } return &schema.Resource{ - Description: "Creates a Kibana rule. 
See https://www.elastic.co/guide/en/kibana/master/create-rule-api.html", - + Description: resourceRuleDescription, CreateContext: resourceRuleCreate, UpdateContext: resourceRuleUpdate, ReadContext: resourceRuleRead, diff --git a/internal/kibana/alerting.md b/internal/kibana/alerting.md new file mode 100644 index 000000000..8fa6a0980 --- /dev/null +++ b/internal/kibana/alerting.md @@ -0,0 +1,7 @@ +Creates a Kibana rule. See https://www.elastic.co/guide/en/kibana/master/create-rule-api.html + +**NOTE:** `api_key` authentication is only supported for alerting rule resources from version 8.8.0 of the Elastic stack. Using an `api_key` with an earlier stack version will result in an error message like: + +``` +Could not create API key - Unsupported scheme "ApiKey" for granting API Key +``` \ No newline at end of file diff --git a/internal/kibana/data_view/schema.go b/internal/kibana/data_view/schema.go index f803b57bd..32b100f92 100644 --- a/internal/kibana/data_view/schema.go +++ b/internal/kibana/data_view/schema.go @@ -24,7 +24,7 @@ func (r *DataViewResource) Schema(_ context.Context, _ resource.SchemaRequest, r func getSchema() schema.Schema { return schema.Schema{ - Description: "Manages Kibana data views", + MarkdownDescription: "Manages Kibana [data views](https://www.elastic.co/guide/en/kibana/current/data-views-api.html)", Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ Computed: true, diff --git a/internal/kibana/import_saved_objects/schema.go b/internal/kibana/import_saved_objects/schema.go index 7f448e3a3..fd2fe1e9e 100644 --- a/internal/kibana/import_saved_objects/schema.go +++ b/internal/kibana/import_saved_objects/schema.go @@ -34,7 +34,7 @@ var _ resource.ResourceWithConfigure = &Resource{} func (r *Resource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { resp.Schema = schema.Schema{ - Description: "Imports saved objects from the referenced file", + Description: "Create sets of Kibana saved objects from a file created by the export API. 
See https://www.elastic.co/guide/en/kibana/current/saved-objects-api-import.html", Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ Computed: true, diff --git a/internal/kibana/maintenance_window/schema.go b/internal/kibana/maintenance_window/schema.go index 4ad24e1c5..f9b51f446 100644 --- a/internal/kibana/maintenance_window/schema.go +++ b/internal/kibana/maintenance_window/schema.go @@ -19,7 +19,7 @@ import ( func (r *MaintenanceWindowResource) Schema(_ context.Context, _ resource.SchemaRequest, resp *resource.SchemaResponse) { resp.Schema = schema.Schema{ - Description: "Manages Kibana maintenance windows", + MarkdownDescription: "Creates and manages Kibana [maintenance windows](https://www.elastic.co/docs/api/doc/kibana/group/endpoint-maintenance-window)", Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ Computed: true, diff --git a/internal/kibana/role.go b/internal/kibana/role.go index 0138545b1..d44750565 100644 --- a/internal/kibana/role.go +++ b/internal/kibana/role.go @@ -6,6 +6,8 @@ import ( "fmt" "strings" + _ "embed" + "github.com/disaster37/go-kibana-rest/v8/kbapi" "github.com/elastic/terraform-provider-elasticstack/internal/clients" "github.com/elastic/terraform-provider-elasticstack/internal/utils" @@ -20,6 +22,9 @@ var ( minSupportedDescriptionVersion = version.Must(version.NewVersion("8.15.0")) ) +//go:embed role.md +var roleDescription string + func ResourceRole() *schema.Resource { roleSchema := map[string]*schema.Schema{ "name": { @@ -243,7 +248,7 @@ func ResourceRole() *schema.Resource { } return &schema.Resource{ - Description: "Creates a Kibana role. See, https://www.elastic.co/guide/en/kibana/master/role-management-api-put.html", + Description: roleDescription, CreateContext: resourceRoleUpsert, UpdateContext: resourceRoleUpsert, diff --git a/internal/kibana/role.md b/internal/kibana/role.md new file mode 100644 index 000000000..53877e8fe --- /dev/null +++ b/internal/kibana/role.md @@ -0,0 +1,5 @@ +Creates or updates a Kibana role. See https://www.elastic.co/guide/en/kibana/master/role-management-api-put.html + +For Features, see: https://www.elastic.co/guide/en/kibana/current/features-api-get.html + +For Security Privileges, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/security-privileges.html diff --git a/internal/kibana/slo.go b/internal/kibana/slo.go index fb83c4a72..b8eeab1f7 100644 --- a/internal/kibana/slo.go +++ b/internal/kibana/slo.go @@ -24,7 +24,7 @@ var ( func ResourceSlo() *schema.Resource { return &schema.Resource{ - Description: "Creates an SLO.", + Description: `Creates or updates a Kibana SLO. See the [Kibana SLO docs](https://www.elastic.co/guide/en/observability/current/slo.html) and [dev docs](https://github.com/elastic/kibana/blob/main/x-pack/plugins/observability/dev_docs/slo.md) for more information.`, CreateContext: resourceSloCreate, UpdateContext: resourceSloUpdate, diff --git a/internal/kibana/spaces/schema.go b/internal/kibana/spaces/schema.go index 4b45988a6..80383a055 100644 --- a/internal/kibana/spaces/schema.go +++ b/internal/kibana/spaces/schema.go @@ -11,7 +11,7 @@ import ( // Schema defines the schema for the data source. func (d *dataSource) Schema(_ context.Context, _ datasource.SchemaRequest, resp *datasource.SchemaResponse) { resp.Schema = schema.Schema{ - Description: "Manages Kibana spaces", + Description: "Use this data source to retrieve and get information about all existing Kibana spaces. 
See https://www.elastic.co/guide/en/kibana/master/spaces-api-get-all.html", Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ Description: "Generated ID for the spaces.", diff --git a/internal/kibana/synthetics/parameter/resource-description.md b/internal/kibana/synthetics/parameter/resource-description.md new file mode 100644 index 000000000..3760c2bb3 --- /dev/null +++ b/internal/kibana/synthetics/parameter/resource-description.md @@ -0,0 +1,4 @@ +Creates or updates a Kibana synthetics parameter. + +See [Working with secrets and sensitive values](https://www.elastic.co/docs/solutions/observability/synthetics/work-with-params-secrets) +and [API docs](https://www.elastic.co/docs/api/doc/kibana/group/endpoint-synthetics) diff --git a/internal/kibana/synthetics/parameter/schema.go b/internal/kibana/synthetics/parameter/schema.go index 00dc87321..cb59f46e8 100644 --- a/internal/kibana/synthetics/parameter/schema.go +++ b/internal/kibana/synthetics/parameter/schema.go @@ -1,6 +1,7 @@ package parameter import ( + _ "embed" "slices" "strings" @@ -21,6 +22,9 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" ) +//go:embed resource-description.md +var syntheticsParameterDescription string + type tfModelV0 struct { ID types.String `tfsdk:"id"` Key types.String `tfsdk:"key"` @@ -32,7 +36,7 @@ type tfModelV0 struct { func parameterSchema() schema.Schema { return schema.Schema{ - MarkdownDescription: "Synthetics parameter config, see https://www.elastic.co/docs/api/doc/kibana/group/endpoint-synthetics for more details", + MarkdownDescription: syntheticsParameterDescription, Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ Computed: true, diff --git a/internal/kibana/synthetics/private_location/resource-description.md b/internal/kibana/synthetics/private_location/resource-description.md new file mode 100644 index 000000000..63dcca1bd --- /dev/null +++ b/internal/kibana/synthetics/private_location/resource-description.md @@ -0,0 +1,4 @@ +Creates or updates a Kibana synthetics private location. 
+ +See [Monitor via a private agent](https://www.elastic.co/guide/en/observability/current/synthetics-private-location.html#monitor-via-private-agent) +and [API docs](https://www.elastic.co/guide/en/kibana/current/create-private-location-api.html) diff --git a/internal/kibana/synthetics/private_location/schema.go b/internal/kibana/synthetics/private_location/schema.go index 26e31b1fb..ad55cc2f9 100644 --- a/internal/kibana/synthetics/private_location/schema.go +++ b/internal/kibana/synthetics/private_location/schema.go @@ -1,6 +1,7 @@ package private_location import ( + _ "embed" "strings" "github.com/disaster37/go-kibana-rest/v8/kbapi" @@ -24,7 +25,7 @@ type tfModelV0 struct { func privateLocationSchema() schema.Schema { return schema.Schema{ - MarkdownDescription: "Synthetics private location config, see https://www.elastic.co/guide/en/kibana/current/create-private-location-api.html for more details", + MarkdownDescription: syntheticsPrivateLocationDescription, Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ Computed: true, @@ -98,3 +99,6 @@ func toModelV0(pLoc kbapi.PrivateLocation) tfModelV0 { Geo: synthetics.FromSyntheticGeoConfig(pLoc.Geo), } } + +//go:embed resource-description.md +var syntheticsPrivateLocationDescription string diff --git a/internal/kibana/synthetics/resource-description.md b/internal/kibana/synthetics/resource-description.md new file mode 100644 index 000000000..6814d5fa6 --- /dev/null +++ b/internal/kibana/synthetics/resource-description.md @@ -0,0 +1,12 @@ +Creates or updates a Kibana synthetics monitor. See [API docs](https://www.elastic.co/guide/en/kibana/current/add-monitor-api.html) + +## Supported monitor types + * `http` + * `tcp` + * `icmp` + * `browser` + +The monitor type is determined by the fields in the `suite` block. See the [API docs](https://www.elastic.co/guide/en/kibana/current/add-monitor-api.html#add-monitor-api-request-body) for more details on which fields are required for each monitor type. + +**NOTE:** Due to the nature of the partial update API, resetting values to their defaults is not supported. +If you would like to reset an optional monitor value, set it explicitly or delete and recreate the monitor. \ No newline at end of file diff --git a/internal/kibana/synthetics/schema.go b/internal/kibana/synthetics/schema.go index 365a9ee90..b7024270b 100644 --- a/internal/kibana/synthetics/schema.go +++ b/internal/kibana/synthetics/schema.go @@ -2,6 +2,7 @@ package synthetics import ( "context" + _ "embed" "encoding/json" "fmt" "regexp" @@ -127,9 +128,12 @@ func GetCompositeId(id string) (*clients.CompositeId, diag.Diagnostics) { return compositeID, dg } +//go:embed resource-description.md +var monitorDescription string + func monitorConfigSchema() schema.Schema { return schema.Schema{ - MarkdownDescription: "Synthetics monitor config, see https://www.elastic.co/guide/en/kibana/current/add-monitor-api.html for more details. 
The monitor must have one of the following: http, tcp, icmp or browser.", + MarkdownDescription: monitorDescription, Attributes: map[string]schema.Attribute{ "id": schema.StringAttribute{ Computed: true, From 0291dccd030e59fbeee1c049817a800070f16aa9 Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Wed, 17 Sep 2025 10:04:10 +1000 Subject: [PATCH 3/6] Remove redundant templates --- .../resource.tf | 21 ++++++++++ .../resource2.tf | 18 --------- .../elasticsearch_enrich_policy.md.tmpl | 16 -------- .../elasticsearch_index_template.md.tmpl | 17 -------- .../elasticsearch_indices.md.tmpl | 17 -------- .../data-sources/elasticsearch_info.md.tmpl | 17 -------- ...sticsearch_ingest_processor_append.md.tmpl | 21 ---------- ...asticsearch_ingest_processor_bytes.md.tmpl | 22 ---------- ...sticsearch_ingest_processor_circle.md.tmpl | 20 ---------- ...arch_ingest_processor_community_id.md.tmpl | 23 ----------- ...ticsearch_ingest_processor_convert.md.tmpl | 28 ------------- ...elasticsearch_ingest_processor_csv.md.tmpl | 22 ---------- ...lasticsearch_ingest_processor_date.md.tmpl | 22 ---------- ...h_ingest_processor_date_index_name.md.tmpl | 23 ----------- ...ticsearch_ingest_processor_dissect.md.tmpl | 22 ---------- ...arch_ingest_processor_dot_expander.md.tmpl | 20 ---------- ...lasticsearch_ingest_processor_drop.md.tmpl | 20 ---------- ...sticsearch_ingest_processor_enrich.md.tmpl | 19 --------- ...lasticsearch_ingest_processor_fail.md.tmpl | 20 ---------- ...earch_ingest_processor_fingerprint.md.tmpl | 19 --------- ...ticsearch_ingest_processor_foreach.md.tmpl | 34 ---------------- ...asticsearch_ingest_processor_geoip.md.tmpl | 27 ------------- ...lasticsearch_ingest_processor_grok.md.tmpl | 25 ------------ ...lasticsearch_ingest_processor_gsub.md.tmpl | 20 ---------- ...search_ingest_processor_html_strip.md.tmpl | 20 ---------- ...lasticsearch_ingest_processor_join.md.tmpl | 20 ---------- ...lasticsearch_ingest_processor_json.md.tmpl | 19 --------- .../elasticsearch_ingest_processor_kv.md.tmpl | 20 ---------- ...csearch_ingest_processor_lowercase.md.tmpl | 20 ---------- ...ingest_processor_network_direction.md.tmpl | 40 ------------------- ...icsearch_ingest_processor_pipeline.md.tmpl | 22 ---------- ...ingest_processor_registered_domain.md.tmpl | 20 ---------- ...sticsearch_ingest_processor_remove.md.tmpl | 20 ---------- ...sticsearch_ingest_processor_rename.md.tmpl | 20 ---------- ...ticsearch_ingest_processor_reroute.md.tmpl | 20 ---------- ...sticsearch_ingest_processor_script.md.tmpl | 30 -------------- ...elasticsearch_ingest_processor_set.md.tmpl | 20 ---------- ...ingest_processor_set_security_user.md.tmpl | 20 ---------- ...lasticsearch_ingest_processor_sort.md.tmpl | 20 ---------- ...asticsearch_ingest_processor_split.md.tmpl | 20 ---------- ...lasticsearch_ingest_processor_trim.md.tmpl | 22 ---------- ...csearch_ingest_processor_uppercase.md.tmpl | 20 ---------- ...csearch_ingest_processor_uri_parts.md.tmpl | 20 ---------- ...csearch_ingest_processor_urldecode.md.tmpl | 20 ---------- ...search_ingest_processor_user_agent.md.tmpl | 23 ----------- .../elasticsearch_security_role.md.tmpl | 17 -------- ...lasticsearch_security_role_mapping.md.tmpl | 17 -------- .../elasticsearch_security_user.md.tmpl | 17 -------- .../elasticsearch_snapshot_repository.md.tmpl | 17 -------- .../fleet_enrollment_tokens.md.tmpl | 17 -------- .../data-sources/fleet_integration.md.tmpl | 26 ------------ .../kibana_action_connector.md.tmpl | 17 -------- .../data-sources/kibana_security_role.md.tmpl | 17 -------- 
templates/data-sources/kibana_spaces.md.tmpl | 17 -------- .../resources/apm_agent_configuration.md.tmpl | 23 ----------- .../elasticsearch_cluster_settings.md.tmpl | 17 -------- .../elasticsearch_component_template.md.tmpl | 23 ----------- .../elasticsearch_data_stream.md.tmpl | 23 ----------- ...lasticsearch_data_stream_lifecycle.md.tmpl | 23 ----------- .../elasticsearch_enrich_policy.md.tmpl | 23 ----------- .../resources/elasticsearch_index.md.tmpl | 28 ------------- .../elasticsearch_index_lifecycle.md.tmpl | 23 ----------- .../elasticsearch_index_template.md.tmpl | 23 ----------- .../elasticsearch_ingest_pipeline.md.tmpl | 31 -------------- .../elasticsearch_logstash_pipeline.md.tmpl | 23 ----------- .../resources/elasticsearch_script.md.tmpl | 23 ----------- .../elasticsearch_security_api_key.md.tmpl | 21 ---------- .../elasticsearch_security_role.md.tmpl | 23 ----------- ...lasticsearch_security_role_mapping.md.tmpl | 23 ----------- ...elasticsearch_security_system_user.md.tmpl | 18 --------- .../elasticsearch_security_user.md.tmpl | 23 ----------- .../elasticsearch_snapshot_lifecycle.md.tmpl | 23 ----------- .../elasticsearch_snapshot_repository.md.tmpl | 23 ----------- .../resources/elasticsearch_transform.md.tmpl | 25 ------------ .../resources/elasticsearch_watch.md.tmpl | 23 ----------- .../resources/fleet_agent_policy.md.tmpl | 23 ----------- templates/resources/fleet_integration.md.tmpl | 22 ---------- .../fleet_integration_policy.md.tmpl | 32 --------------- templates/resources/fleet_output.md.tmpl | 23 ----------- templates/resources/fleet_server_host.md.tmpl | 23 ----------- .../resources/kibana_action_connector.md.tmpl | 23 ----------- .../resources/kibana_alerting_rule.md.tmpl | 30 -------------- templates/resources/kibana_data_view.md.tmpl | 23 ----------- .../kibana_import_saved_objects.md.tmpl | 21 ---------- .../kibana_maintenance_window.md.tmpl | 23 ----------- .../resources/kibana_security_role.md.tmpl | 36 ++++++++++++----- templates/resources/kibana_slo.md.tmpl | 23 ----------- templates/resources/kibana_space.md.tmpl | 23 ----------- .../kibana_synthetics_monitor.md.tmpl | 34 ---------------- .../kibana_synthetics_parameter.md.tmpl | 25 ------------ ...kibana_synthetics_private_location.md.tmpl | 25 ------------ 91 files changed, 47 insertions(+), 1978 deletions(-) delete mode 100644 examples/resources/elasticstack_elasticsearch_ingest_pipeline/resource2.tf delete mode 100644 templates/data-sources/elasticsearch_enrich_policy.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_index_template.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_indices.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_info.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_append.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_bytes.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_circle.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_community_id.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_convert.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_csv.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_date.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_date_index_name.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_dissect.md.tmpl delete mode 100644 
templates/data-sources/elasticsearch_ingest_processor_dot_expander.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_drop.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_enrich.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_fail.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_fingerprint.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_foreach.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_geoip.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_grok.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_gsub.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_html_strip.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_join.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_json.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_kv.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_lowercase.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_network_direction.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_pipeline.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_registered_domain.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_remove.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_rename.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_reroute.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_script.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_set.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_set_security_user.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_sort.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_split.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_trim.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_uppercase.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_uri_parts.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_urldecode.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_ingest_processor_user_agent.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_security_role.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_security_role_mapping.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_security_user.md.tmpl delete mode 100644 templates/data-sources/elasticsearch_snapshot_repository.md.tmpl delete mode 100644 templates/data-sources/fleet_enrollment_tokens.md.tmpl delete mode 100644 templates/data-sources/fleet_integration.md.tmpl delete mode 100644 templates/data-sources/kibana_action_connector.md.tmpl delete mode 100644 templates/data-sources/kibana_security_role.md.tmpl delete mode 100644 templates/data-sources/kibana_spaces.md.tmpl delete mode 100644 templates/resources/apm_agent_configuration.md.tmpl delete mode 100644 templates/resources/elasticsearch_cluster_settings.md.tmpl delete mode 100644 
templates/resources/elasticsearch_component_template.md.tmpl delete mode 100644 templates/resources/elasticsearch_data_stream.md.tmpl delete mode 100644 templates/resources/elasticsearch_data_stream_lifecycle.md.tmpl delete mode 100644 templates/resources/elasticsearch_enrich_policy.md.tmpl delete mode 100644 templates/resources/elasticsearch_index.md.tmpl delete mode 100644 templates/resources/elasticsearch_index_lifecycle.md.tmpl delete mode 100644 templates/resources/elasticsearch_index_template.md.tmpl delete mode 100644 templates/resources/elasticsearch_ingest_pipeline.md.tmpl delete mode 100644 templates/resources/elasticsearch_logstash_pipeline.md.tmpl delete mode 100644 templates/resources/elasticsearch_script.md.tmpl delete mode 100644 templates/resources/elasticsearch_security_api_key.md.tmpl delete mode 100644 templates/resources/elasticsearch_security_role.md.tmpl delete mode 100644 templates/resources/elasticsearch_security_role_mapping.md.tmpl delete mode 100644 templates/resources/elasticsearch_security_system_user.md.tmpl delete mode 100644 templates/resources/elasticsearch_security_user.md.tmpl delete mode 100644 templates/resources/elasticsearch_snapshot_lifecycle.md.tmpl delete mode 100644 templates/resources/elasticsearch_snapshot_repository.md.tmpl delete mode 100644 templates/resources/elasticsearch_transform.md.tmpl delete mode 100644 templates/resources/elasticsearch_watch.md.tmpl delete mode 100644 templates/resources/fleet_agent_policy.md.tmpl delete mode 100644 templates/resources/fleet_integration.md.tmpl delete mode 100644 templates/resources/fleet_integration_policy.md.tmpl delete mode 100644 templates/resources/fleet_output.md.tmpl delete mode 100644 templates/resources/fleet_server_host.md.tmpl delete mode 100644 templates/resources/kibana_action_connector.md.tmpl delete mode 100644 templates/resources/kibana_alerting_rule.md.tmpl delete mode 100644 templates/resources/kibana_data_view.md.tmpl delete mode 100644 templates/resources/kibana_import_saved_objects.md.tmpl delete mode 100644 templates/resources/kibana_maintenance_window.md.tmpl delete mode 100644 templates/resources/kibana_slo.md.tmpl delete mode 100644 templates/resources/kibana_space.md.tmpl delete mode 100644 templates/resources/kibana_synthetics_monitor.md.tmpl delete mode 100644 templates/resources/kibana_synthetics_parameter.md.tmpl delete mode 100644 templates/resources/kibana_synthetics_private_location.md.tmpl diff --git a/examples/resources/elasticstack_elasticsearch_ingest_pipeline/resource.tf b/examples/resources/elasticstack_elasticsearch_ingest_pipeline/resource.tf index f6e84f9f5..0f4fcffe7 100644 --- a/examples/resources/elasticstack_elasticsearch_ingest_pipeline/resource.tf +++ b/examples/resources/elasticstack_elasticsearch_ingest_pipeline/resource.tf @@ -2,6 +2,7 @@ provider "elasticstack" { elasticsearch {} } +// You can provide the ingest pipeline processors as plain JSON objects. resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { name = "my_ingest_pipeline" description = "My first ingest pipeline managed by Terraform" @@ -26,3 +27,23 @@ EOF , ] } + +// Or you can use the provided data sources to create the processor data sources. 
+data "elasticstack_elasticsearch_ingest_processor_set" "set_count" { + field = "count" + value = 1 +} + +data "elasticstack_elasticsearch_ingest_processor_json" "parse_string_source" { + field = "string_source" + target_field = "json_target" +} + +resource "elasticstack_elasticsearch_ingest_pipeline" "ingest" { + name = "set-parse" + + processors = [ + data.elasticstack_elasticsearch_ingest_processor_set.set_count.json, + data.elasticstack_elasticsearch_ingest_processor_json.parse_string_source.json + ] +} diff --git a/examples/resources/elasticstack_elasticsearch_ingest_pipeline/resource2.tf b/examples/resources/elasticstack_elasticsearch_ingest_pipeline/resource2.tf deleted file mode 100644 index 069260aa2..000000000 --- a/examples/resources/elasticstack_elasticsearch_ingest_pipeline/resource2.tf +++ /dev/null @@ -1,18 +0,0 @@ -data "elasticstack_elasticsearch_ingest_processor_set" "set_count" { - field = "count" - value = 1 -} - -data "elasticstack_elasticsearch_ingest_processor_json" "parse_string_source" { - field = "string_source" - target_field = "json_target" -} - -resource "elasticstack_elasticsearch_ingest_pipeline" "ingest" { - name = "set-parse" - - processors = [ - data.elasticstack_elasticsearch_ingest_processor_set.set_count.json, - data.elasticstack_elasticsearch_ingest_processor_json.parse_string_source.json - ] -} diff --git a/templates/data-sources/elasticsearch_enrich_policy.md.tmpl b/templates/data-sources/elasticsearch_enrich_policy.md.tmpl deleted file mode 100644 index ca961cdb9..000000000 --- a/templates/data-sources/elasticsearch_enrich_policy.md.tmpl +++ /dev/null @@ -1,16 +0,0 @@ ---- -subcategory: "Enrich" -page_title: "elasticstack_elasticsearch_enrich_policy Data Source - terraform-provider-elasticstack" -description: |- - Returns information about an enrich policy. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html ---- - -# Data Source: elasticstack_elasticsearch_enrich_policy - -Returns information about an enrich policy. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_enrich_policy/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_index_template.md.tmpl b/templates/data-sources/elasticsearch_index_template.md.tmpl deleted file mode 100644 index 2c45cd386..000000000 --- a/templates/data-sources/elasticsearch_index_template.md.tmpl +++ /dev/null @@ -1,17 +0,0 @@ ---- -subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_index_template Data Source" -description: |- - Retrieves index template. ---- - -# Data Source: elasticstack_elasticsearch_index_template - -Use this data source to retrieve information about existing Elasticsearch index templates. 
See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_index_template/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_indices.md.tmpl b/templates/data-sources/elasticsearch_indices.md.tmpl deleted file mode 100644 index 33075a2f0..000000000 --- a/templates/data-sources/elasticsearch_indices.md.tmpl +++ /dev/null @@ -1,17 +0,0 @@ ---- -subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_indices Data Source" -description: |- - Retrieves indices. ---- - -# Data Source: elasticstack_elasticsearch_indices - -Use this data source to retrieve and get information about existing Elasticsearch indices. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_indices/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_info.md.tmpl b/templates/data-sources/elasticsearch_info.md.tmpl deleted file mode 100644 index 0f964e43d..000000000 --- a/templates/data-sources/elasticsearch_info.md.tmpl +++ /dev/null @@ -1,17 +0,0 @@ ---- -subcategory: "Cluster" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_info Data Source" -description: |- - Gets information about the Elasticsearch cluster. ---- - -# Data Source: elasticstack_elasticsearch_info - -This data source provides the information about the configured Elasticsearch cluster - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_info/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_append.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_append.md.tmpl deleted file mode 100644 index 1d9572505..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_append.md.tmpl +++ /dev/null @@ -1,21 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_append Data Source" -description: |- - Helper data source to create a processor which appends one or more values to an existing array if the field already exists and it is an array. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_append - -Helper data source to which can be used to create a processor to append one or more values to an existing array if the field already exists and it is an array. -Converts a scalar to an array and appends one or more values to it if the field exists and it is a scalar. Creates an array containing the provided values if the field doesn’t exist. 
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/append-processor.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_append/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} - diff --git a/templates/data-sources/elasticsearch_ingest_processor_bytes.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_bytes.md.tmpl deleted file mode 100644 index ef26a59e5..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_bytes.md.tmpl +++ /dev/null @@ -1,22 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_bytes Data Source" -description: |- - Helper data source to create a processor which converts a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_bytes - -Helper data source to which can be used to create a processor to convert a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). If the field is an array of strings, all members of the array will be converted. - -Supported human readable units are "b", "kb", "mb", "gb", "tb", "pb" case insensitive. An error will occur if the field is not a supported format or resultant value exceeds 2^63. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/bytes-processor.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_bytes/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} - diff --git a/templates/data-sources/elasticsearch_ingest_processor_circle.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_circle.md.tmpl deleted file mode 100644 index 4f3320884..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_circle.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_circle Data Source" -description: |- - Helper data source to create a processor which converts circle definitions of shapes to regular polygons which approximate them. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_circle - -Helper data source to which can be used to create a processor to convert circle definitions of shapes to regular polygons which approximate them. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-circle-processor.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_circle/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} - diff --git a/templates/data-sources/elasticsearch_ingest_processor_community_id.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_community_id.md.tmpl deleted file mode 100644 index cfde53513..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_community_id.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_community_id Data Source" -description: |- - Helper data source to create a processor which computes the Community ID for network flow data as defined in the Community ID Specification. 
---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_community_id - -Helper data source to which can be used to create a processor to compute the Community ID for network flow data as defined in the [Community ID Specification](https://github.com/corelight/community-id-spec). -You can use a community ID to correlate network events related to a single flow. - -The community ID processor reads network flow data from related [Elastic Common Schema (ECS)](https://www.elastic.co/guide/en/ecs/1.12) fields by default. If you use the ECS, no configuration is required. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/community-id-processor.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_community_id/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} - diff --git a/templates/data-sources/elasticsearch_ingest_processor_convert.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_convert.md.tmpl deleted file mode 100644 index d7cadc6bd..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_convert.md.tmpl +++ /dev/null @@ -1,28 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_convert Data Source" -description: |- - Helper data source to create a processor which converts a field in the currently ingested document to a different type, such as converting a string to an integer. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_convert - -Helper data source to which can be used to convert a field in the currently ingested document to a different type, such as converting a string to an integer. If the field value is an array, all members will be converted. - -The supported types include: `integer`, `long`, `float`, `double`, `string`, `boolean`, `ip`, and `auto`. - -Specifying `boolean` will set the field to true if its string value is equal to true (ignore case), to false if its string value is equal to false (ignore case), or it will throw an exception otherwise. - -Specifying `ip` will set the target field to the value of `field` if it contains a valid IPv4 or IPv6 address that can be indexed into an IP field type. - -Specifying `auto` will attempt to convert the string-valued `field` into the closest non-string, non-IP type. For example, a field whose value is "true" will be converted to its respective boolean type: true. Do note that float takes precedence of double in auto. A value of "242.15" will "automatically" be converted to 242.15 of type `float`. If a provided field cannot be appropriately converted, the processor will still process successfully and leave the field value as-is. In such a case, `target_field` will be updated with the unconverted field value. 
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/convert-processor.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_convert/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} - diff --git a/templates/data-sources/elasticsearch_ingest_processor_csv.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_csv.md.tmpl deleted file mode 100644 index 19cb5536b..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_csv.md.tmpl +++ /dev/null @@ -1,22 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_csv Data Source" -description: |- - Helper data source to create a processor which extracts fields from CSV line out of a single text field within a document. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_csv - -Helper data source to which can be used to extract fields from CSV line out of a single text field within a document. Any empty field in CSV will be skipped. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/csv-processor.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_csv/data-source.tf" }} - -If the `trim` option is enabled then any whitespace in the beginning and in the end of each unquoted field will be trimmed. For example with configuration above, a value of A, B will result in field field2 having value {nbsp}B (with space at the beginning). If trim is enabled A, B will result in field field2 having value B (no whitespace). Quoted fields will be left untouched. - -{{ .SchemaMarkdown | trimspace }} - diff --git a/templates/data-sources/elasticsearch_ingest_processor_date.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_date.md.tmpl deleted file mode 100644 index 6ce419f61..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_date.md.tmpl +++ /dev/null @@ -1,22 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_date Data Source" -description: |- - Helper data source to create a processor which parses dates from fields, and then uses the date or timestamp as the timestamp for the document. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_date - -Helper data source to which can be used to parse dates from fields, and then uses the date or timestamp as the timestamp for the document. -By default, the date processor adds the parsed date as a new field called `@timestamp`. You can specify a different field by setting the `target_field` configuration parameter. Multiple date formats are supported as part of the same date processor definition. They will be used sequentially to attempt parsing the date field, in the same order they were defined as part of the processor definition. 
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-processor.html - -## Example Usage - -Here is an example that adds the parsed date to the `timestamp` field based on the `initial_date` field: - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_date/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_date_index_name.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_date_index_name.md.tmpl deleted file mode 100644 index 84b9e7529..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_date_index_name.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_date_index_name Data Source" -description: |- - Helper data source to create a processor which helps to point documents to the right time based index based on a date or timestamp field in a document by using the date math index name support. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_date_index_name - -The purpose of this processor is to point documents to the right time based index based on a date or timestamp field in a document by using the date math index name support. - -The processor sets the _index metadata field with a date math index name expression based on the provided index name prefix, a date or timestamp field in the documents being processed and the provided date rounding. - -First, this processor fetches the date or timestamp from a field in the document being processed. Optionally, date formatting can be configured on how the field’s value should be parsed into a date. Then this date, the provided index name prefix and the provided date rounding get formatted into a date math index name expression. Also here optionally date formatting can be specified on how the date should be formatted into a date math index name expression. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-index-name-processor.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_date_index_name/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_dissect.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_dissect.md.tmpl deleted file mode 100644 index d5e78b1fc..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_dissect.md.tmpl +++ /dev/null @@ -1,22 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_dissect Data Source" -description: |- - Helper data source to create a processor which extracts structured fields out of a single text field within a document. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_dissect - -Similar to the Grok Processor, dissect also extracts structured fields out of a single text field within a document. However unlike the Grok Processor, dissect does not use Regular Expressions. This allows dissect’s syntax to be simple and for some cases faster than the Grok Processor. - -Dissect matches a single text field against a defined pattern. 
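As a hedged illustration (separate from the bundled example below), a dissect pattern might be declared like this; the `field` and `pattern` argument names are assumed to mirror the Elasticsearch dissect processor options. Note that `%` has to be escaped as `%%` inside Terraform string templates.

```terraform
# Hypothetical sketch: pull three space-separated values out of "message".
# "%" is doubled because Terraform treats "%{" as a template directive.
data "elasticstack_elasticsearch_ingest_processor_dissect" "access_log" {
  field   = "message"
  pattern = "%%{clientip} %%{ident} %%{auth}"
}
```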
- - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/dissect-processor.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_dissect/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_dot_expander.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_dot_expander.md.tmpl deleted file mode 100644 index 09197e378..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_dot_expander.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_dot_expander Data Source" -description: |- - Helper data source to create a processor which expands a field with dots into an object field. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_dot_expander - -Expands a field with dots into an object field. This processor allows fields with dots in the name to be accessible by other processors in the pipeline. Otherwise these fields can’t be accessed by any processor. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/dot-expand-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_dot_expander/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_drop.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_drop.md.tmpl deleted file mode 100644 index 9902e4d70..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_drop.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_drop Data Source" -description: |- - Helper data source to create a processor which drops the document without raising any errors. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_drop - -Drops the document without raising any errors. This is useful to prevent the document from getting indexed based on some condition. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/drop-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_drop/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_enrich.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_enrich.md.tmpl deleted file mode 100644 index c31c70eec..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_enrich.md.tmpl +++ /dev/null @@ -1,19 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_enrich Data Source" -description: |- - Helper data source to create a processor which enriches documents with data from another index. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_enrich - -The enrich processor can enrich documents with data from another index. See the enrich data section for more information about how to set this up.
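For orientation only, a sketch of an enrich processor definition follows; the argument names (`policy_name`, `field`, `target_field`, `max_matches`) are assumed to mirror the Elasticsearch enrich processor options, and an enrich policy named `users-policy` is assumed to already exist.

```terraform
# Hypothetical sketch: copy the matching enrich document into "user_info".
data "elasticstack_elasticsearch_ingest_processor_enrich" "users" {
  policy_name  = "users-policy"
  field        = "email"
  target_field = "user_info"
  max_matches  = 1
}
```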
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-enriching-data.html and https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-processor.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_enrich/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_fail.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_fail.md.tmpl deleted file mode 100644 index f4f54453c..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_fail.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_fail Data Source" -description: |- - Helper data source to create a processor which raises an exception. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_fail - -Raises an exception. This is useful for when you expect a pipeline to fail and want to relay a specific message to the requester. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fail-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_fail/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_fingerprint.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_fingerprint.md.tmpl deleted file mode 100644 index ec74d337b..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_fingerprint.md.tmpl +++ /dev/null @@ -1,19 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_fingerprint Data Source" -description: |- - Helper data source to create a processor which computes a hash of the document’s content. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_fingerprint - -Computes a hash of the document’s content. You can use this hash for content fingerprinting. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fingerprint-processor.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_fingerprint/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_foreach.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_foreach.md.tmpl deleted file mode 100644 index 6777bf275..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_foreach.md.tmpl +++ /dev/null @@ -1,34 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_foreach Data Source" -description: |- - Helper data source to create a processor which runs an ingest processor on each element of an array or object. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_foreach - -Runs an ingest processor on each element of an array or object. - -All ingest processors can run on array or object elements. However, if the number of elements is unknown, it can be cumbersome to process each one in the same way. - -The `foreach` processor lets you specify a `field` containing array or object values and a `processor` to run on each element in the field. 
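A minimal sketch of this composition is shown below, assuming each processor data source exposes its rendered definition through a `json` attribute that can be passed as the `processor` argument; the exact argument and attribute names should be checked against the generated schema.

```terraform
# Hypothetical sketch: uppercase every element of the "tags" array.
# "_ingest._value" refers to the array element currently being processed.
data "elasticstack_elasticsearch_ingest_processor_uppercase" "tag" {
  field = "_ingest._value"
}

data "elasticstack_elasticsearch_ingest_processor_foreach" "tags" {
  field     = "tags"
  processor = data.elasticstack_elasticsearch_ingest_processor_uppercase.tag.json
}
```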
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/foreach-processor.html - - -### Access keys and values - -When iterating through an array or object, the foreach processor stores the current element’s value in the `_ingest._value` ingest metadata field. `_ingest._value` contains the entire element value, including any child fields. You can access child field values using dot notation on the `_ingest._value` field. - -When iterating through an object, the foreach processor also stores the current element’s key as a string in `_ingest._key`. - -You can access and change `_ingest._key` and `_ingest._value` in the processor. - - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_foreach/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_geoip.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_geoip.md.tmpl deleted file mode 100644 index 56299df05..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_geoip.md.tmpl +++ /dev/null @@ -1,27 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_geoip Data Source" -description: |- - Helper data source to create a processor which adds information about the geographical location of an IPv4 or IPv6 address. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_geoip - -The geoip processor adds information about the geographical location of an IPv4 or IPv6 address. - -By default, the processor uses the GeoLite2 City, GeoLite2 Country, and GeoLite2 ASN GeoIP2 databases from MaxMind, shared under the CC BY-SA 4.0 license. Elasticsearch automatically downloads updates for these databases from the Elastic GeoIP endpoint: https://geoip.elastic.co/v1/database. To get download statistics for these updates, use the GeoIP stats API. - -If your cluster can’t connect to the Elastic GeoIP endpoint or you want to manage your own updates, [see Manage your own GeoIP2 database updates](https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html#manage-geoip-database-updates). - -If Elasticsearch can’t connect to the endpoint for 30 days all updated databases will become invalid. Elasticsearch will stop enriching documents with geoip data and will add tags: ["_geoip_expired_database"] field instead. - - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_geoip/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_grok.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_grok.md.tmpl deleted file mode 100644 index 1f6aa62be..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_grok.md.tmpl +++ /dev/null @@ -1,25 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_grok Data Source" -description: |- - Helper data source to create a processor which extracts structured fields out of a single text field within a document. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_grok - -Extracts structured fields out of a single text field within a document. You choose which field to extract matched fields from, as well as the grok pattern you expect will match. 
A grok pattern is like a regular expression that supports aliased expressions that can be reused. - -This processor comes packaged with many [reusable patterns](https://github.com/elastic/elasticsearch/blob/master/libs/grok/src/main/resources/patterns). - -If you need help building patterns to match your logs, you will find the [Grok Debugger](https://www.elastic.co/guide/en/kibana/master/xpack-grokdebugger.html) tool quite useful! [The Grok Constructor](https://grokconstructor.appspot.com/) is also a useful tool. - - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/grok-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_grok/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_gsub.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_gsub.md.tmpl deleted file mode 100644 index cd3ee2af2..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_gsub.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_gsub Data Source" -description: |- - Helper data source to create a processor which converts a string field by applying a regular expression and a replacement. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_gsub - -Converts a string field by applying a regular expression and a replacement. If the field is an array of strings, all members of the array will be converted. If any non-string values are encountered, the processor will throw an exception. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/gsub-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_gsub/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_html_strip.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_html_strip.md.tmpl deleted file mode 100644 index ab8264261..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_html_strip.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_html_strip Data Source" -description: |- - Helper data source to create a processor which removes HTML tags from the field. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_html_strip - -Removes HTML tags from the field. If the field is an array of strings, HTML tags will be removed from all members of the array. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/htmlstrip-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_html_strip/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_join.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_join.md.tmpl deleted file mode 100644 index 84e2efe54..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_join.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_join Data Source" -description: |- - Helper data source to create a processor which joins each element of an array into a single string using a separator character between each element.
---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_join - -Joins each element of an array into a single string using a separator character between each element. Throws an error when the field is not an array. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/join-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_join/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_json.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_json.md.tmpl deleted file mode 100644 index 7ad37aadb..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_json.md.tmpl +++ /dev/null @@ -1,19 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_json Data Source" -description: |- - Helper data source to create a processor which converts a JSON string into a structured JSON object. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_json - -Converts a JSON string into a structured JSON object. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/json-processor.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_json/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_kv.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_kv.md.tmpl deleted file mode 100644 index edc9c83af..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_kv.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_kv Data Source" -description: |- - Helper data source to create a processor which helps automatically parse messages (or specific event fields) which are of the `foo=bar` variety. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_kv - -This processor helps automatically parse messages (or specific event fields) which are of the `foo=bar` variety. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/kv-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_kv/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_lowercase.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_lowercase.md.tmpl deleted file mode 100644 index 577494137..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_lowercase.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_lowercase Data Source" -description: |- - Helper data source to create a processor which converts a string to its lowercase equivalent. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_lowercase - -Converts a string to its lowercase equivalent. If the field is an array of strings, all members of the array will be converted. 
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/lowercase-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_lowercase/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_network_direction.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_network_direction.md.tmpl deleted file mode 100644 index 457cb8ca9..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_network_direction.md.tmpl +++ /dev/null @@ -1,40 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_network_direction Data Source" -description: |- - Helper data source to create a processor which calculates the network direction given a source IP address, destination IP address, and a list of internal networks. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_network_direction - -Calculates the network direction given a source IP address, destination IP address, and a list of internal networks. - -The network direction processor reads IP addresses from Elastic Common Schema (ECS) fields by default. If you use the ECS, only the `internal_networks` option must be specified. - - -One of either `internal_networks` or `internal_networks_field` must be specified. If `internal_networks_field` is specified, it follows the behavior specified by `ignore_missing`. - -### Supported named network ranges - -The named ranges supported for the `internal_networks` option are: - -* `loopback` - Matches loopback addresses in the range of 127.0.0.0/8 or ::1/128. -* `unicast` or `global_unicast` - Matches global unicast addresses defined in RFC 1122, RFC 4632, and RFC 4291 with the exception of the IPv4 broadcast address (255.255.255.255). This includes private address ranges. -* `multicast` - Matches multicast addresses. -* `interface_local_multicast` - Matches IPv6 interface-local multicast addresses. -* `link_local_unicast` - Matches link-local unicast addresses. -* `link_local_multicast` - Matches link-local multicast addresses. -* `private` - Matches private address ranges defined in RFC 1918 (IPv4) and RFC 4193 (IPv6). -* `public` - Matches addresses that are not loopback, unspecified, IPv4 broadcast, link local unicast, link local multicast, interface local multicast, or private. -* `unspecified` - Matches unspecified addresses (either the IPv4 address "0.0.0.0" or the IPv6 address "::"). - - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/network-direction-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_network_direction/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_pipeline.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_pipeline.md.tmpl deleted file mode 100644 index b9b9115c6..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_pipeline.md.tmpl +++ /dev/null @@ -1,22 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_pipeline Data Source" -description: |- - Helper data source to create a processor which executes another pipeline. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_pipeline - -Executes another pipeline.
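As a hedged sketch, delegating to another pipeline could look like this; the `name` argument is assumed to mirror the Elasticsearch pipeline processor option, and the referenced pipeline is assumed to exist already.

```terraform
# Hypothetical sketch: hand the document off to an existing pipeline.
data "elasticstack_elasticsearch_ingest_processor_pipeline" "common" {
  name = "common-enrichment-pipeline"
}
```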
- -The name of the current pipeline can be accessed from the `_ingest.pipeline` ingest metadata key. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/pipeline-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_pipeline/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_registered_domain.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_registered_domain.md.tmpl deleted file mode 100644 index 976397938..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_registered_domain.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_registered_domain Data Source" -description: |- - Helper data source to create a processor which Extracts the registered domain, sub-domain, and top-level domain from a fully qualified domain name. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_registered_domain - -Extracts the registered domain (also known as the effective top-level domain or eTLD), sub-domain, and top-level domain from a fully qualified domain name (FQDN). Uses the registered domains defined in the Mozilla Public Suffix List. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/registered-domain-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_registered_domain/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_remove.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_remove.md.tmpl deleted file mode 100644 index 74a76d11c..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_remove.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_remove Data Source" -description: |- - Helper data source to create a processor which removes existing fields. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_remove - -Removes existing fields. If one field doesn’t exist, an exception will be thrown. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/remove-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_remove/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_rename.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_rename.md.tmpl deleted file mode 100644 index 207c5482f..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_rename.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_rename Data Source" -description: |- - Helper data source to create a processor which renames an existing field. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_rename - -Renames an existing field. If the field doesn’t exist or the new name is already used, an exception will be thrown. 
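For illustration, a rename might be declared as in the sketch below; `field` and `target_field` are assumed to mirror the Elasticsearch rename processor options.

```terraform
# Hypothetical sketch: move "provider" under the ECS-style "cloud.provider" key.
data "elasticstack_elasticsearch_ingest_processor_rename" "provider" {
  field        = "provider"
  target_field = "cloud.provider"
}
```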
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/rename-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_rename/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_reroute.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_reroute.md.tmpl deleted file mode 100644 index 7ad31c055..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_reroute.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_reroute Data Source" -description: |- - Helper data source to create a processor which reroutes a document to a different data stream, index, or index alias. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_reroute - -Reroutes a document to a different data stream, index, or index alias. This processor is useful for routing documents based on data stream routing rules. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/reroute-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_reroute/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} \ No newline at end of file diff --git a/templates/data-sources/elasticsearch_ingest_processor_script.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_script.md.tmpl deleted file mode 100644 index 785ea671b..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_script.md.tmpl +++ /dev/null @@ -1,30 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_script Data Source" -description: |- - Helper data source to create a processor which runs an inline or stored script on incoming documents. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_script - -Runs an inline or stored script on incoming documents. The script runs in the ingest context. - -The script processor uses the script cache to avoid recompiling the script for each incoming document. To improve performance, ensure the script cache is properly sized before using a script processor in production. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/script-processor.html - -### Access source fields - -The script processor parses each incoming document’s JSON source fields into a set of maps, lists, and primitives. To access these fields with a Painless script, use the map access operator: `ctx['my-field']`. You can also use the shorthand `ctx.` syntax. - -### Access metadata fields - -You can also use a script processor to access metadata fields. - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_script/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_set.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_set.md.tmpl deleted file mode 100644 index 55c921ee4..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_set.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_set Data Source" -description: |- - Helper data source to create a processor which sets one field and associates it with the specified value. 
---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_set - -Sets one field and associates it with the specified value. If the field already exists, its value will be replaced with the provided one. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/set-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_set/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_set_security_user.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_set_security_user.md.tmpl deleted file mode 100644 index a77bcd208..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_set_security_user.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_set_security_user Data Source" -description: |- - Helper data source to create a processor which sets user-related details from the current authenticated user to the current document by pre-processing the ingest. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_set_security_user - -Sets user-related details (such as `username`, `roles`, `email`, `full_name`, `metadata`, `api_key`, `realm` and `authentication_type`) from the current authenticated user to the current document by pre-processing the ingest. The `api_key` property exists only if the user authenticates with an API key. It is an object containing the `id`, `name` and `metadata` (if it exists and is non-empty) fields of the API key. The `realm` property is also an object with two fields, `name` and `type`. When using API key authentication, the `realm` property refers to the realm from which the API key is created. The `authentication_type` property is a string that can take a value from `REALM`, `API_KEY`, `TOKEN` and `ANONYMOUS`. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-node-set-security-user-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_set_security_user/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_sort.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_sort.md.tmpl deleted file mode 100644 index de6f37a05..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_sort.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_sort Data Source" -description: |- - Helper data source to create a processor which sorts the elements of an array ascending or descending. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_sort - -Sorts the elements of an array ascending or descending. Homogeneous arrays of numbers will be sorted numerically, while arrays of strings or heterogeneous arrays of strings + numbers will be sorted lexicographically. Throws an error when the field is not an array.
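A minimal sketch, assuming the `field` and `order` argument names mirror the Elasticsearch sort processor options:

```terraform
# Hypothetical sketch: sort the "tags" array in descending order in place.
data "elasticstack_elasticsearch_ingest_processor_sort" "tags" {
  field = "tags"
  order = "desc"
}
```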
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/sort-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_sort/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_split.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_split.md.tmpl deleted file mode 100644 index ed7e3764b..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_split.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_split Data Source" -description: |- - Helper data source to create a processor which splits a field into an array using a separator character. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_split - -Splits a field into an array using a separator character. Only works on string fields. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/split-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_split/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_trim.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_trim.md.tmpl deleted file mode 100644 index 1c1222aa0..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_trim.md.tmpl +++ /dev/null @@ -1,22 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_trim Data Source" -description: |- - Helper data source to create a processor which trims whitespace from field. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_trim - -Trims whitespace from field. If the field is an array of strings, all members of the array will be trimmed. - -**NOTE:** This only works on leading and trailing whitespace. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/trim-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_trim/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_uppercase.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_uppercase.md.tmpl deleted file mode 100644 index 62a22f67d..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_uppercase.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_uppercase Data Source" -description: |- - Helper data source to create a processor which converts a string to its uppercase equivalent. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_uppercase - -Converts a string to its uppercase equivalent. If the field is an array of strings, all members of the array will be converted. 
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uppercase-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_uppercase/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_uri_parts.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_uri_parts.md.tmpl deleted file mode 100644 index 98ebe3fa8..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_uri_parts.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_uri_parts Data Source" -description: |- - Helper data source to create a processor which parses a Uniform Resource Identifier (URI) string and extracts its components as an object. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_uri_parts - -Parses a Uniform Resource Identifier (URI) string and extracts its components as an object. This URI object includes properties for the URI’s domain, path, fragment, port, query, scheme, user info, username, and password. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uri-parts-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_uri_parts/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_urldecode.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_urldecode.md.tmpl deleted file mode 100644 index 52a270fb3..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_urldecode.md.tmpl +++ /dev/null @@ -1,20 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_urldecode Data Source" -description: |- - Helper data source to create a processor which URL-decodes a string. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_urldecode - -URL-decodes a string. If the field is an array of strings, all members of the array will be decoded. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/urldecode-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_urldecode/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_ingest_processor_user_agent.md.tmpl b/templates/data-sources/elasticsearch_ingest_processor_user_agent.md.tmpl deleted file mode 100644 index 145a66a1d..000000000 --- a/templates/data-sources/elasticsearch_ingest_processor_user_agent.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_user_agent Data Source" -description: |- - Helper data source to create a processor which extracts details from the user agent string a browser sends with its web requests. ---- - -# Data Source: elasticstack_elasticsearch_ingest_processor_user_agent - -The `user_agent` processor extracts details from the user agent string a browser sends with its web requests. This processor adds this information by default under the `user_agent` field. - -The ingest-user-agent module ships by default with the regexes.yaml made available by uap-java with an Apache 2.0 license. For more details see https://github.com/ua-parser/uap-core. 
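As a rough sketch, assuming the `field` argument mirrors the Elasticsearch user_agent processor option (the parsed details then land under the default `user_agent` target field):

```terraform
# Hypothetical sketch: parse the raw browser string stored in "agent".
data "elasticstack_elasticsearch_ingest_processor_user_agent" "agent" {
  field = "agent"
}
```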
- - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/user-agent-processor.html - - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_ingest_processor_user_agent/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_security_role.md.tmpl b/templates/data-sources/elasticsearch_security_role.md.tmpl deleted file mode 100644 index 73d4a6cee..000000000 --- a/templates/data-sources/elasticsearch_security_role.md.tmpl +++ /dev/null @@ -1,17 +0,0 @@ ---- -subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_role Data Source" -description: |- - Retrieves roles in the native realm. ---- - -# Data Source: elasticstack_elasticsearch_security_role - -Use this data source to get information about an existing Elasticsearch role. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_security_role/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_security_role_mapping.md.tmpl b/templates/data-sources/elasticsearch_security_role_mapping.md.tmpl deleted file mode 100644 index 305d04aa8..000000000 --- a/templates/data-sources/elasticsearch_security_role_mapping.md.tmpl +++ /dev/null @@ -1,17 +0,0 @@ ---- -subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_role_mapping Data Source" -description: |- - Retrieves role mappings. ---- - -# Data Source: elasticstack_elasticsearch_security_role_mapping - -Retrieves role mappings. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_security_role_mapping/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_security_user.md.tmpl b/templates/data-sources/elasticsearch_security_user.md.tmpl deleted file mode 100644 index 6fdce1d42..000000000 --- a/templates/data-sources/elasticsearch_security_user.md.tmpl +++ /dev/null @@ -1,17 +0,0 @@ ---- -subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_user Data Source" -description: |- - Gets information about Elasticsearch user. ---- - -# Data Source: elasticstack_elasticsearch_security_user - -Use this data source to get information about existing Elasticsearch user. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user.html". - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_security_user/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/elasticsearch_snapshot_repository.md.tmpl b/templates/data-sources/elasticsearch_snapshot_repository.md.tmpl deleted file mode 100644 index 5a599a697..000000000 --- a/templates/data-sources/elasticsearch_snapshot_repository.md.tmpl +++ /dev/null @@ -1,17 +0,0 @@ ---- -subcategory: "Snapshot" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_snapshot_repository Data Source" -description: |- - Gets information about the registered snapshot repositories. 
---- - -# Data Source: elasticstack_elasticsearch_snapshot_repository - -This data source provides information about the registered snapshot repositories. - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_elasticsearch_snapshot_repository/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/fleet_enrollment_tokens.md.tmpl b/templates/data-sources/fleet_enrollment_tokens.md.tmpl deleted file mode 100644 index c3fcd1871..000000000 --- a/templates/data-sources/fleet_enrollment_tokens.md.tmpl +++ /dev/null @@ -1,17 +0,0 @@ ---- -subcategory: "Fleet" -layout: "" -page_title: "Elasticstack: elasticstack_fleet_enrollment_tokens Data Source" -description: |- - Gets information about Fleet Enrollment Tokens. See https://www.elastic.co/guide/en/fleet/current/fleet-enrollment-tokens.html ---- - -# Data Source: elasticstack_fleet_enrollment_tokens - -This data source provides information about Fleet Enrollment Tokens. - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_fleet_enrollment_tokens/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/fleet_integration.md.tmpl b/templates/data-sources/fleet_integration.md.tmpl deleted file mode 100644 index e212bb541..000000000 --- a/templates/data-sources/fleet_integration.md.tmpl +++ /dev/null @@ -1,26 +0,0 @@ ---- -subcategory: "Fleet" -layout: "" -page_title: "Elasticstack: elasticstack_fleet_integration Data Source" -description: |- - Gets information about a Fleet integration package. ---- - -# Data Source: elasticstack_fleet_integration - -This data source provides information about a Fleet integration package. Currently, -the data source will retrieve the latest available version of the package. Version -selection is determined by the Fleet API, which is currently based on semantic -versioning. - -By default, the highest GA release version will be selected. If a -package is not GA (the version is below 1.0.0) or if a new non-GA version of the -package is to be selected (i.e., the GA version of the package is 1.5.0, but there's -a new 1.5.1-beta version available), then the `prerelease` parameter in the plan -should be set to `true`. - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_fleet_integration/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/kibana_action_connector.md.tmpl b/templates/data-sources/kibana_action_connector.md.tmpl deleted file mode 100644 index ce1432540..000000000 --- a/templates/data-sources/kibana_action_connector.md.tmpl +++ /dev/null @@ -1,17 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_action_connector Data Source" -description: |- - Retrieve a specific action connector. See https://www.elastic.co/guide/en/kibana/current/get-all-connectors-api.html. ---- - -# Data Source: elasticstack_kibana_action_connector - -Use this data source to get information about an existing action connector.
- -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_kibana_action_connector/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/kibana_security_role.md.tmpl b/templates/data-sources/kibana_security_role.md.tmpl deleted file mode 100644 index 176c583af..000000000 --- a/templates/data-sources/kibana_security_role.md.tmpl +++ /dev/null @@ -1,17 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_security_role Data Source" -description: |- - Retrieve a specific Kibana role. See https://www.elastic.co/guide/en/kibana/master/role-management-specific-api-get.html ---- - -# Data Source: elasticstack_kibana_security_role - -Use this data source to get information about an existing Kibana role. - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_kibana_security_role/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/data-sources/kibana_spaces.md.tmpl b/templates/data-sources/kibana_spaces.md.tmpl deleted file mode 100644 index 4ad23929c..000000000 --- a/templates/data-sources/kibana_spaces.md.tmpl +++ /dev/null @@ -1,17 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_spaces Data Source" -description: |- - Retrieve all Kibana spaces. See https://www.elastic.co/guide/en/kibana/master/spaces-api-get-all.html ---- - -# Data Source: elasticstack_kibana_spaces - -Use this data source to retrieve and get information about all existing Kibana spaces. - -## Example Usage - -{{ tffile "examples/data-sources/elasticstack_kibana_spaces/data-source.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/resources/apm_agent_configuration.md.tmpl b/templates/resources/apm_agent_configuration.md.tmpl deleted file mode 100644 index 103e4b62e..000000000 --- a/templates/resources/apm_agent_configuration.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_apm_agent_configuration Resource" -description: |- - Creates or updates an APM agent configuration ---- - -# Resource: elasticstack_apm_agent_configuration - -Creates or updates an APM agent configuration. See https://www.elastic.co/docs/solutions/observability/apm/apm-agent-central-configuration - -## Example Usage - -{{ tffile "examples/resources/elasticstack_apm_agent_configuration/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_apm_agent_configuration/import.sh" }} diff --git a/templates/resources/elasticsearch_cluster_settings.md.tmpl b/templates/resources/elasticsearch_cluster_settings.md.tmpl deleted file mode 100644 index d0a782927..000000000 --- a/templates/resources/elasticsearch_cluster_settings.md.tmpl +++ /dev/null @@ -1,17 +0,0 @@ ---- -subcategory: "Cluster" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_cluster_settings Resource" -description: |- - Updates cluster-wide settings. ---- - -# Resource: elasticstack_elasticsearch_cluster_settings - -Updates cluster-wide settings. If the Elasticsearch security features are enabled, you must have the manage cluster privilege to use this API. 
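A hedged sketch of managing a persistent setting is shown below; the `persistent` and nested `setting` block names are assumptions based on how the cluster update settings API groups persistent and transient settings, so the generated schema below should be treated as authoritative.

```terraform
# Hypothetical sketch: tune the ILM poll interval as a persistent cluster setting.
resource "elasticstack_elasticsearch_cluster_settings" "this" {
  persistent {
    setting {
      name  = "indices.lifecycle.poll_interval"
      value = "10m"
    }
  }
}
```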
See, https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_cluster_settings/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/resources/elasticsearch_component_template.md.tmpl b/templates/resources/elasticsearch_component_template.md.tmpl deleted file mode 100644 index a1e9d7830..000000000 --- a/templates/resources/elasticsearch_component_template.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_component_template Resource" -description: |- - Creates or updates a component template. ---- - -# Resource: elasticstack_elasticsearch_component_template - -Creates or updates a component template. Component templates are building blocks for constructing index templates that specify index mappings, settings, and aliases. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_component_template/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_component_template/import.sh" }} diff --git a/templates/resources/elasticsearch_data_stream.md.tmpl b/templates/resources/elasticsearch_data_stream.md.tmpl deleted file mode 100644 index 4d7759bbb..000000000 --- a/templates/resources/elasticsearch_data_stream.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_data_stream Resource" -description: |- - Manages Elasticsearch Data Streams ---- - -# Resource: elasticstack_elasticsearch_data_stream - -Manages data streams. This resource can create, delete and show the information about the created data stream. 
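Because a data stream can only be created once a matching index template with data stream support exists, a hedged sketch of the usual pairing is shown below; the `index_patterns` and `data_stream` names on the index template resource are assumed to mirror the Elasticsearch index template API and may differ from the provider's actual schema.

```terraform
# Hypothetical sketch: a data stream needs a matching index template first.
resource "elasticstack_elasticsearch_index_template" "logs" {
  name           = "my-logs-template"
  index_patterns = ["my-logs-*"]

  # Marks matching indices as backing a data stream (assumed block name).
  data_stream {}
}

resource "elasticstack_elasticsearch_data_stream" "logs" {
  name       = "my-logs-production"
  depends_on = [elasticstack_elasticsearch_index_template.logs]
}
```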
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_data_stream/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_data_stream/import.sh" }} diff --git a/templates/resources/elasticsearch_data_stream_lifecycle.md.tmpl b/templates/resources/elasticsearch_data_stream_lifecycle.md.tmpl deleted file mode 100644 index c45c92f85..000000000 --- a/templates/resources/elasticsearch_data_stream_lifecycle.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Index" -layout: "" -page_title: "Elasticstack: {{ .Name }} {{ .Type }}" -description: |- - Manages Lifecycle for Elasticsearch Data Streams ---- - -# {{ .Type }}: {{ .Name }} - -Configures the data stream lifecycle for the targeted data streams, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_data_stream_lifecycle/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_data_stream_lifecycle/import.sh" }} diff --git a/templates/resources/elasticsearch_enrich_policy.md.tmpl b/templates/resources/elasticsearch_enrich_policy.md.tmpl deleted file mode 100644 index 9f5197192..000000000 --- a/templates/resources/elasticsearch_enrich_policy.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Enrich" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_enrich_policy" -description: |- - Managing Elasticsearch enrich policies, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-apis.html ---- - -# Resource: elasticstack_elasticsearch_enrich_policy - -Creates or updates enrich policies, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-apis.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_enrich_policy/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_enrich_policy/import.sh" }} \ No newline at end of file diff --git a/templates/resources/elasticsearch_index.md.tmpl b/templates/resources/elasticsearch_index.md.tmpl deleted file mode 100644 index c82cb37cc..000000000 --- a/templates/resources/elasticsearch_index.md.tmpl +++ /dev/null @@ -1,28 +0,0 @@ ---- -subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_index Resource" -description: |- - Creates or updates an index. ---- - -# Resource: elasticstack_elasticsearch_index - -Creates or updates an index. This resource can define settings, mappings and aliases. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_index/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -**NOTE:** While importing index resource, keep in mind, that some of the default index settings will be imported into the TF state too. -You can later adjust the index configuration to account for those imported settings. 
- -Some of the default settings, which could be imported are: `index.number_of_replicas`, `index.number_of_shards` and `index.routing.allocation.include._tier_preference`. - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_index/import.sh" }} diff --git a/templates/resources/elasticsearch_index_lifecycle.md.tmpl b/templates/resources/elasticsearch_index_lifecycle.md.tmpl deleted file mode 100644 index 9ec22802d..000000000 --- a/templates/resources/elasticsearch_index_lifecycle.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_index_lifecycle Resource" -description: |- - Creates or updates lifecycle policy. ---- - -# Resource: elasticstack_elasticsearch_index_lifecycle - -Creates or updates lifecycle policy. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-put-lifecycle.html and https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-index-lifecycle.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_index_lifecycle/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_index_lifecycle/import.sh" }} diff --git a/templates/resources/elasticsearch_index_template.md.tmpl b/templates/resources/elasticsearch_index_template.md.tmpl deleted file mode 100644 index 90b1d5a9f..000000000 --- a/templates/resources/elasticsearch_index_template.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_index_template Resource" -description: |- - Creates or updates an index template. ---- - -# Resource: elasticstack_elasticsearch_index_template - -Creates or updates an index template. Index templates define settings, mappings, and aliases that can be applied automatically to new indices. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-put-template.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_index_template/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_index_template/import.sh" }} diff --git a/templates/resources/elasticsearch_ingest_pipeline.md.tmpl b/templates/resources/elasticsearch_ingest_pipeline.md.tmpl deleted file mode 100644 index 0a4b83f66..000000000 --- a/templates/resources/elasticsearch_ingest_pipeline.md.tmpl +++ /dev/null @@ -1,31 +0,0 @@ ---- -subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_pipeline Resource" -description: |- - Manages Ingest Pipelines ---- - -# Resource: elasticstack_elasticsearch_ingest_pipeline - -Use ingest APIs to manage tasks and resources related to ingest pipelines and processors. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-apis.html - -## Example Usage - -You can provide your custom JSON definitions for the ingest processors: - -{{ tffile "examples/resources/elasticstack_elasticsearch_ingest_pipeline/resource.tf" }} - - -Or you can use data sources and Terraform declarative way of defining the ingest processors: - -{{ tffile "examples/resources/elasticstack_elasticsearch_ingest_pipeline/resource2.tf" }} - - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_ingest_pipeline/import.sh" }} diff --git a/templates/resources/elasticsearch_logstash_pipeline.md.tmpl b/templates/resources/elasticsearch_logstash_pipeline.md.tmpl deleted file mode 100644 index a98d8d849..000000000 --- a/templates/resources/elasticsearch_logstash_pipeline.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Logstash" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_logstash_pipeline Resource" -description: |- - Creates or updates centrally managed logstash pipelines. ---- - -# Resource: elasticstack_elasticsearch_logstash_pipeline - -Creates or updates centrally managed logstash pipelines. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-apis.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_logstash_pipeline/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_logstash_pipeline/import.sh" }} diff --git a/templates/resources/elasticsearch_script.md.tmpl b/templates/resources/elasticsearch_script.md.tmpl deleted file mode 100644 index 81f0b1b90..000000000 --- a/templates/resources/elasticsearch_script.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Cluster" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_script Resource" -description: |- - Creates or updates a stored script or search template. ---- - -# Resource: elasticstack_elasticsearch_script - -Creates or updates a stored script or search template. See https://www.elastic.co/guide/en/elasticsearch/reference/current/create-stored-script-api.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_script/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_script/import.sh" }} diff --git a/templates/resources/elasticsearch_security_api_key.md.tmpl b/templates/resources/elasticsearch_security_api_key.md.tmpl deleted file mode 100644 index 460e40c9b..000000000 --- a/templates/resources/elasticsearch_security_api_key.md.tmpl +++ /dev/null @@ -1,21 +0,0 @@ ---- -subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_api_key Resource" -description: |- - Creates an API key for access without requiring basic authentication. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html ---- - -# elasticstack_elasticsearch_security_api_key (Resource) - -Creates an API key for access without requiring basic authentication. 
See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_security_api_key/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is not supported due to the generated API key only being visible on create. diff --git a/templates/resources/elasticsearch_security_role.md.tmpl b/templates/resources/elasticsearch_security_role.md.tmpl deleted file mode 100644 index 237236e40..000000000 --- a/templates/resources/elasticsearch_security_role.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_role Resource" -description: |- - Adds and updates roles in the native realm. ---- - -# Resource: elasticstack_elasticsearch_security_role - -Adds and updates roles in the native realm. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_security_role/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_security_role/import.sh" }} diff --git a/templates/resources/elasticsearch_security_role_mapping.md.tmpl b/templates/resources/elasticsearch_security_role_mapping.md.tmpl deleted file mode 100644 index aa4df23dc..000000000 --- a/templates/resources/elasticsearch_security_role_mapping.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_role_mapping Resource" -description: |- - Manage role mappings. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role-mapping.html ---- - -# Resource: elasticstack_elasticsearch_security_role_mapping - -Manage role mappings. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role-mapping.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_security_role_mapping/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_security_role_mapping/import.sh" }} diff --git a/templates/resources/elasticsearch_security_system_user.md.tmpl b/templates/resources/elasticsearch_security_system_user.md.tmpl deleted file mode 100644 index ca2fa49b9..000000000 --- a/templates/resources/elasticsearch_security_system_user.md.tmpl +++ /dev/null @@ -1,18 +0,0 @@ ---- -subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_system_user Resource" -description: |- - Updates system user's password and enablement. ---- - -# Resource: elasticstack_elasticsearch_security_system_user - -Updates system user's password and enablement. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/built-in-users.html -Since this resource is to manage built-in users, destroy will not delete the underlying Elasticsearch and will only remove it from Terraform state. 
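
For illustration only, a minimal sketch of managing a built-in user. The attribute names (`username`, `password`, `enabled`) are assumed from the provider schema rather than taken from this template; the Example Usage file referenced below remains the authoritative example.

```terraform
provider "elasticstack" {
  elasticsearch {}
}

variable "kibana_system_password" {
  type      = string
  sensitive = true
}

resource "elasticstack_elasticsearch_security_system_user" "kibana_system" {
  # "username" must refer to an existing built-in user. Destroying this
  # resource only removes it from Terraform state; the user itself is kept.
  username = "kibana_system"
  password = var.kibana_system_password
  enabled  = true
}
```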
- -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_security_system_user/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/resources/elasticsearch_security_user.md.tmpl b/templates/resources/elasticsearch_security_user.md.tmpl deleted file mode 100644 index 58ecf4d51..000000000 --- a/templates/resources/elasticsearch_security_user.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_user Resource" -description: |- - Adds and updates users in the native realm. ---- - -# Resource: elasticstack_elasticsearch_security_user - -Adds and updates users in the native realm. These users are commonly referred to as native users. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-user.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_security_user/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_security_user/import.sh" }} diff --git a/templates/resources/elasticsearch_snapshot_lifecycle.md.tmpl b/templates/resources/elasticsearch_snapshot_lifecycle.md.tmpl deleted file mode 100644 index fbb4b41d9..000000000 --- a/templates/resources/elasticsearch_snapshot_lifecycle.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Snapshot" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_snapshot_lifecycle Resource" -description: |- - Creates or updates a snapshot lifecycle policy. ---- - -# Resource: elasticstack_elasticsearch_snapshot_lifecycle - -Creates or updates a snapshot lifecycle policy. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/slm-api-put-policy.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_snapshot_lifecycle/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_snapshot_lifecycle/import.sh" }} diff --git a/templates/resources/elasticsearch_snapshot_repository.md.tmpl b/templates/resources/elasticsearch_snapshot_repository.md.tmpl deleted file mode 100644 index e2f155e31..000000000 --- a/templates/resources/elasticsearch_snapshot_repository.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Snapshot" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_snapshot_repository Resource" -description: |- - Registers or updates a snapshot repository. ---- - -# Resource: elasticstack_elasticsearch_snapshot_repository - -Registers or updates a snapshot repository. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/put-snapshot-repo-api.html and https://www.elastic.co/guide/en/elasticsearch/reference/current/snapshots-register-repository.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_snapshot_repository/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_snapshot_repository/import.sh" }} diff --git a/templates/resources/elasticsearch_transform.md.tmpl b/templates/resources/elasticsearch_transform.md.tmpl deleted file mode 100644 index 6c997cef8..000000000 --- a/templates/resources/elasticsearch_transform.md.tmpl +++ /dev/null @@ -1,25 +0,0 @@ ---- -subcategory: "Transform" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_transform Resource" -description: |- - Manages transforms. Transforms enable you to convert existing Elasticsearch indices into summarized indices. ---- - -# Resource: elasticstack_elasticsearch_transform - -Creates, updates, starts and stops a transform. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/transforms.html - -**NOTE:** Some transform settings require a minimum Elasticsearch version. Such settings will be ignored when applied to versions below the required one (a warning will be issued in the logs). - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_transform/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_transform/import.sh" }} diff --git a/templates/resources/elasticsearch_watch.md.tmpl b/templates/resources/elasticsearch_watch.md.tmpl deleted file mode 100644 index 587fbecb2..000000000 --- a/templates/resources/elasticsearch_watch.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Watcher" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_watch Resource" -description: |- - Adds and manages a Watch. ---- - -# Resource: elasticstack_elasticsearch_watch - -Adds and manages a Watch. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_elasticsearch_watch/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_elasticsearch_watch/import.sh" }} diff --git a/templates/resources/fleet_agent_policy.md.tmpl b/templates/resources/fleet_agent_policy.md.tmpl deleted file mode 100644 index 2c2ade221..000000000 --- a/templates/resources/fleet_agent_policy.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Fleet" -layout: "" -page_title: "Elasticstack: elasticstack_fleet_agent_policy Resource" -description: |- - Creates or updates a Fleet Agent Policy. ---- - -# Resource: elasticstack_fleet_agent_policy - -Creates or updates a Fleet Agent Policy. 
See https://www.elastic.co/guide/en/fleet/current/fleet-api-docs.html#create-agent-policy-api - -## Example Usage - -{{ tffile "examples/resources/elasticstack_fleet_agent_policy/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_fleet_agent_policy/import.sh" }} diff --git a/templates/resources/fleet_integration.md.tmpl b/templates/resources/fleet_integration.md.tmpl deleted file mode 100644 index e0e089047..000000000 --- a/templates/resources/fleet_integration.md.tmpl +++ /dev/null @@ -1,22 +0,0 @@ ---- -subcategory: "Fleet" -layout: "" -page_title: "Elasticstack: elasticstack_fleet_integration Resource" -description: |- - Installs or uninstalls a Fleet integration package. ---- - -# Resource: elasticstack_fleet_integration - -Installs or uninstalls a Fleet integration package. The Kibana Fleet UI can be -used to view available packages. Additional information for managing integration -packages can be found [here](https://www.elastic.co/guide/en/fleet/current/install-uninstall-integration-assets.html). - -To prevent the package from being uninstalled when the resource is destroyed, -set `skip_destroy` to `true`. - -## Example Usage - -{{ tffile "examples/resources/elasticstack_fleet_integration/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} diff --git a/templates/resources/fleet_integration_policy.md.tmpl b/templates/resources/fleet_integration_policy.md.tmpl deleted file mode 100644 index 7e9274d62..000000000 --- a/templates/resources/fleet_integration_policy.md.tmpl +++ /dev/null @@ -1,32 +0,0 @@ ---- -subcategory: "Fleet" -layout: "" -page_title: "Elasticstack: elasticstack_fleet_integration_policy Resource" -description: |- - Creates or updates a Fleet Integration Policy. ---- - -# Resource: elasticstack_fleet_integration_policy - -Creates or updates a Fleet Integration Policy. - -It is highly recommended that all inputs and streams are provided in the -Terraform plan, even if some are disabled. Otherwise, differences may appear -between what is in the plan versus what is returned by the Fleet API. - -The [Kibana Fleet UI](https://www.elastic.co/guide/en/fleet/current/add-integration-to-policy.html) -can be used as a reference for what data needs to be provided. Instead of saving -a new integration configuration, the API request can be previewed, showing what -values need to be provided for inputs and their streams. - -## Example Usage - -{{ tffile "examples/resources/elasticstack_fleet_integration_policy/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_fleet_integration_policy/import.sh" }} diff --git a/templates/resources/fleet_output.md.tmpl b/templates/resources/fleet_output.md.tmpl deleted file mode 100644 index d709042e6..000000000 --- a/templates/resources/fleet_output.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Fleet" -layout: "" -page_title: "Elasticstack: elasticstack_fleet_output Resource" -description: |- - Creates or updates a Fleet Output. ---- - -# Resource: elasticstack_fleet_output - -Creates or updates a Fleet Output. 
- -## Example Usage - -{{ tffile "examples/resources/elasticstack_fleet_output/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_fleet_output/import.sh" }} diff --git a/templates/resources/fleet_server_host.md.tmpl b/templates/resources/fleet_server_host.md.tmpl deleted file mode 100644 index fe47c7c0c..000000000 --- a/templates/resources/fleet_server_host.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Fleet" -layout: "" -page_title: "Elasticstack: elasticstack_fleet_server_host Resource" -description: |- - Creates or updates a Fleet Server Host. ---- - -# Resource: elasticstack_fleet_server_host - -Creates or updates a Fleet Server Host. - -## Example Usage - -{{ tffile "examples/resources/elasticstack_fleet_server_host/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_fleet_server_host/import.sh" }} diff --git a/templates/resources/kibana_action_connector.md.tmpl b/templates/resources/kibana_action_connector.md.tmpl deleted file mode 100644 index 6fbf646c4..000000000 --- a/templates/resources/kibana_action_connector.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_action_connector Resource" -description: |- - Creates or updates a Kibana action connector. See https://www.elastic.co/guide/en/kibana/current/action-types.html ---- - -# Resource: elasticstack_kibana_action_connector - -Creates or updates a Kibana action connector. See https://www.elastic.co/guide/en/kibana/current/action-types.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_kibana_action_connector/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_kibana_action_connector/import.sh" }} diff --git a/templates/resources/kibana_alerting_rule.md.tmpl b/templates/resources/kibana_alerting_rule.md.tmpl deleted file mode 100644 index b259e8130..000000000 --- a/templates/resources/kibana_alerting_rule.md.tmpl +++ /dev/null @@ -1,30 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_alerting_rule Resource" -description: |- - Creates or updates a Kibana alerting rule. ---- - -# Resource: elasticstack_kibana_alerting_rule - -Creates or updates a Kibana alerting rule. See https://www.elastic.co/guide/en/kibana/current/create-and-manage-rules.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_kibana_alerting_rule/resource.tf" }} - - -**NOTE:** `api_key` authentication is only supported for alerting rule resources from version 8.8.0 of the Elastic stack. 
Using an `api_key` will result in an error message like: - -``` -Could not create API key - Unsupported scheme "ApiKey" for granting API Key -``` - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_kibana_alerting_rule/import.sh" }} diff --git a/templates/resources/kibana_data_view.md.tmpl b/templates/resources/kibana_data_view.md.tmpl deleted file mode 100644 index 56de1c423..000000000 --- a/templates/resources/kibana_data_view.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_data_view Resource" -description: |- - Manages Kibana data views. ---- - -# Resource: elasticstack_kibana_data_view - -Creates and manages Kibana [data views](https://www.elastic.co/guide/en/kibana/current/data-views-api.html) - -## Example Usage - -{{ tffile "examples/resources/elasticstack_kibana_data_view/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_kibana_data_view/import.sh" }} diff --git a/templates/resources/kibana_import_saved_objects.md.tmpl b/templates/resources/kibana_import_saved_objects.md.tmpl deleted file mode 100644 index 7679d6f13..000000000 --- a/templates/resources/kibana_import_saved_objects.md.tmpl +++ /dev/null @@ -1,21 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_import_saved_objects Resource" -description: |- - Create sets of Kibana saved objects from a file created by the export API. ---- - -# Resource: elasticstack_kibana_import_saved_objects - -Create sets of Kibana saved objects from a file created by the export API. See https://www.elastic.co/guide/en/kibana/current/saved-objects-api-import.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_kibana_import_saved_objects/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is not supported. diff --git a/templates/resources/kibana_maintenance_window.md.tmpl b/templates/resources/kibana_maintenance_window.md.tmpl deleted file mode 100644 index e7fddee61..000000000 --- a/templates/resources/kibana_maintenance_window.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_maintenance_window Resource" -description: |- - Manages Kibana maintenance windows. 
---- - -# Resource: elasticstack_kibana_maintenance_window - -Creates and manages Kibana [maintenance windows](https://www.elastic.co/docs/api/doc/kibana/group/endpoint-maintenance-window) - -## Example Usage - -{{ tffile "examples/resources/elasticstack_kibana_maintenance_window/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_kibana_maintenance_window/import.sh" }} diff --git a/templates/resources/kibana_security_role.md.tmpl b/templates/resources/kibana_security_role.md.tmpl index ebac85587..5cf31c778 100644 --- a/templates/resources/kibana_security_role.md.tmpl +++ b/templates/resources/kibana_security_role.md.tmpl @@ -1,18 +1,14 @@ --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "{{.Name}} {{.Type}} - {{.RenderedProviderName}}" subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_security_role Resource" description: |- - Creates or updates a Kibana role. +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} --- -# Resource: elasticstack_kibana_security_role +# {{.Name}} ({{.Type}}) -Creates or updates a Kibana role. See https://www.elastic.co/guide/en/kibana/master/role-management-api-put.html - -For Features, see: https://www.elastic.co/guide/en/kibana/current/features-api-get.html - -For Security Privileges, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/security-privileges.html +{{ .Description | trimspace }} ## Example Usage @@ -25,9 +21,29 @@ For Security Privileges, see: https://www.elastic.co/guide/en/elasticsearch/refe {{ tffile "examples/resources/elasticstack_kibana_security_role/resource-with-feature.tf" }} {{ .SchemaMarkdown | trimspace }} +{{- if or .HasImport .HasImportIDConfig .HasImportIdentityConfig }} ## Import Import is supported using the following syntax: +{{- end }} +{{- if .HasImportIdentityConfig }} + +In Terraform v1.12.0 and later, the [`import` block](https://developer.hashicorp.com/terraform/language/import) can be used with the `identity` attribute, for example: + +{{tffile .ImportIdentityConfigFile }} + +{{ .IdentitySchemaMarkdown | trimspace }} +{{- end }} +{{- if .HasImportIDConfig }} + +In Terraform v1.5.0 and later, the [`import` block](https://developer.hashicorp.com/terraform/language/import) can be used with the ` + "`" + `id` + "`" + ` attribute, for example: + +{{tffile .ImportIDConfigFile }} +{{- end }} +{{- if .HasImport }} + +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: -{{ codefile "shell" "examples/resources/elasticstack_kibana_security_role/import.sh" }} +{{codefile "shell" .ImportFile }} +{{- end }} diff --git a/templates/resources/kibana_slo.md.tmpl b/templates/resources/kibana_slo.md.tmpl deleted file mode 100644 index c88772104..000000000 --- a/templates/resources/kibana_slo.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_slo Resource" -description: |- - Creates or updates a Kibana SLO. ---- - -# Resource: elasticstack_kibana_slo - -Creates or updates a Kibana SLO. See the [Kibana SLO docs](https://www.elastic.co/guide/en/observability/current/slo.html) and [dev docs](https://github.com/elastic/kibana/blob/main/x-pack/plugins/observability/dev_docs/slo.md) for more information. 
- -## Example Usage - -{{ tffile "examples/resources/elasticstack_kibana_slo/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_kibana_slo/import.sh" }} diff --git a/templates/resources/kibana_space.md.tmpl b/templates/resources/kibana_space.md.tmpl deleted file mode 100644 index 5033fd7d2..000000000 --- a/templates/resources/kibana_space.md.tmpl +++ /dev/null @@ -1,23 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_space Resource" -description: |- - Creates or updates a Kibana space. ---- - -# Resource: elasticstack_kibana_space - -Creates or updates a Kibana space. See https://www.elastic.co/guide/en/kibana/master/xpack-spaces.html - -## Example Usage - -{{ tffile "examples/resources/elasticstack_kibana_space/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_kibana_space/import.sh" }} diff --git a/templates/resources/kibana_synthetics_monitor.md.tmpl b/templates/resources/kibana_synthetics_monitor.md.tmpl deleted file mode 100644 index 6f99d891e..000000000 --- a/templates/resources/kibana_synthetics_monitor.md.tmpl +++ /dev/null @@ -1,34 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_synthetics_monitor Resource" -description: |- - Creates or updates a Kibana synthetics monitor. ---- - -# Resource: elasticstack_kibana_synthetics_monitor - -Creates or updates a Kibana synthetics monitor. -See [API docs](https://www.elastic.co/guide/en/kibana/current/add-monitor-api.html) - -## Supported monitor types - * `http` - * `tcp` - * `icmp` - * `browser` - -**NOTE:** Due-to nature of partial update API, reset values to defaults is not supported. -In case you would like to reset an optional monitor value, please set it explicitly or delete and create new monitor. - - -## Example Usage - -{{ tffile "examples/resources/elasticstack_kibana_synthetics_monitor/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_kibana_synthetics_monitor/import.sh" }} \ No newline at end of file diff --git a/templates/resources/kibana_synthetics_parameter.md.tmpl b/templates/resources/kibana_synthetics_parameter.md.tmpl deleted file mode 100644 index 6e2789c68..000000000 --- a/templates/resources/kibana_synthetics_parameter.md.tmpl +++ /dev/null @@ -1,25 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_synthetics_parameter Resource" -description: |- - Creates or updates a Kibana synthetics parameter. ---- - -# Resource: elasticstack_kibana_synthetics_parameter - -Creates or updates a Kibana synthetics parameter. 
-See [Working with secrets and sensitive values](https://www.elastic.co/docs/solutions/observability/synthetics/work-with-params-secrets) -and [API docs](https://www.elastic.co/docs/api/doc/kibana/group/endpoint-synthetics) - -## Example Usage - -{{ tffile "examples/resources/elasticstack_kibana_synthetics_parameter/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_kibana_synthetics_parameter/import.sh" }} \ No newline at end of file diff --git a/templates/resources/kibana_synthetics_private_location.md.tmpl b/templates/resources/kibana_synthetics_private_location.md.tmpl deleted file mode 100644 index c50a4c4a5..000000000 --- a/templates/resources/kibana_synthetics_private_location.md.tmpl +++ /dev/null @@ -1,25 +0,0 @@ ---- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_synthetics_private_location Resource" -description: |- - Creates or updates a Kibana synthetics private location. ---- - -# Resource: elasticstack_kibana_synthetics_private_location - -Creates or updates a Kibana synthetics private location. -See [Monitor via a private agent](https://www.elastic.co/guide/en/observability/current/synthetics-private-location.html#monitor-via-private-agent) -and [API docs](https://www.elastic.co/guide/en/kibana/current/create-private-location-api.html) - -## Example Usage - -{{ tffile "examples/resources/elasticstack_kibana_synthetics_private_location/resource.tf" }} - -{{ .SchemaMarkdown | trimspace }} - -## Import - -Import is supported using the following syntax: - -{{ codefile "shell" "examples/resources/elasticstack_kibana_synthetics_private_location/import.sh" }} \ No newline at end of file From c9f36f5f9b2177db0dff9f6a0e729bfe50305e8d Mon Sep 17 00:00:00 2001 From: Toby Brain Date: Wed, 17 Sep 2025 10:04:20 +1000 Subject: [PATCH 4/6] make docs-generate --- .../elasticsearch_enrich_policy.md | 6 ++-- .../elasticsearch_index_template.md | 11 ++++--- docs/data-sources/elasticsearch_indices.md | 11 ++++--- docs/data-sources/elasticsearch_info.md | 13 ++++---- .../elasticsearch_ingest_processor_append.md | 15 ++++----- .../elasticsearch_ingest_processor_bytes.md | 18 ++++++----- .../elasticsearch_ingest_processor_circle.md | 14 ++++----- ...ticsearch_ingest_processor_community_id.md | 16 +++++----- .../elasticsearch_ingest_processor_convert.md | 31 +++++++++++++------ .../elasticsearch_ingest_processor_csv.md | 17 +++++----- .../elasticsearch_ingest_processor_date.md | 17 +++++----- ...search_ingest_processor_date_index_name.md | 15 ++++----- .../elasticsearch_ingest_processor_dissect.md | 16 +++++----- ...ticsearch_ingest_processor_dot_expander.md | 14 ++++----- .../elasticsearch_ingest_processor_drop.md | 14 ++++----- .../elasticsearch_ingest_processor_enrich.md | 13 ++++---- .../elasticsearch_ingest_processor_fail.md | 14 ++++----- ...sticsearch_ingest_processor_fingerprint.md | 13 ++++---- .../elasticsearch_ingest_processor_foreach.md | 22 +++++++------ .../elasticsearch_ingest_processor_geoip.md | 20 ++++++------ .../elasticsearch_ingest_processor_grok.md | 17 +++++----- .../elasticsearch_ingest_processor_gsub.md | 14 ++++----- ...asticsearch_ingest_processor_html_strip.md | 14 ++++----- .../elasticsearch_ingest_processor_join.md | 14 ++++----- .../elasticsearch_ingest_processor_json.md | 13 ++++---- .../elasticsearch_ingest_processor_kv.md | 14 ++++----- ...lasticsearch_ingest_processor_lowercase.md | 14 ++++----- 
...arch_ingest_processor_network_direction.md | 23 +++++++------- ...elasticsearch_ingest_processor_pipeline.md | 16 ++++------ ...arch_ingest_processor_registered_domain.md | 14 ++++----- .../elasticsearch_ingest_processor_remove.md | 14 ++++----- .../elasticsearch_ingest_processor_rename.md | 14 ++++----- .../elasticsearch_ingest_processor_reroute.md | 16 +++++----- .../elasticsearch_ingest_processor_script.md | 24 ++++---------- .../elasticsearch_ingest_processor_set.md | 14 ++++----- ...arch_ingest_processor_set_security_user.md | 14 ++++----- .../elasticsearch_ingest_processor_sort.md | 14 ++++----- .../elasticsearch_ingest_processor_split.md | 14 ++++----- .../elasticsearch_ingest_processor_trim.md | 16 ++++------ ...lasticsearch_ingest_processor_uppercase.md | 14 ++++----- ...lasticsearch_ingest_processor_uri_parts.md | 14 ++++----- ...lasticsearch_ingest_processor_urldecode.md | 14 ++++----- ...asticsearch_ingest_processor_user_agent.md | 17 ++++------ .../elasticsearch_security_role.md | 11 ++++--- .../elasticsearch_security_role_mapping.md | 9 +++--- .../elasticsearch_security_user.md | 11 ++++--- .../elasticsearch_snapshot_repository.md | 9 +++--- docs/data-sources/fleet_enrollment_tokens.md | 11 ++++--- docs/data-sources/fleet_integration.md | 17 +++++++--- docs/data-sources/kibana_action_connector.md | 11 ++++--- docs/data-sources/kibana_security_role.md | 11 ++++--- docs/data-sources/kibana_spaces.md | 11 ++++--- docs/resources/apm_agent_configuration.md | 15 +++++---- .../elasticsearch_cluster_settings.md | 9 +++--- .../elasticsearch_component_template.md | 11 ++++--- docs/resources/elasticsearch_data_stream.md | 13 +++++--- .../elasticsearch_data_stream_lifecycle.md | 11 ++++--- docs/resources/elasticsearch_enrich_policy.md | 15 +++++---- docs/resources/elasticsearch_index.md | 18 +++++------ .../elasticsearch_index_lifecycle.md | 11 ++++--- .../resources/elasticsearch_index_template.md | 11 ++++--- .../elasticsearch_ingest_pipeline.md | 23 ++++++-------- .../elasticsearch_logstash_pipeline.md | 13 +++++--- docs/resources/elasticsearch_script.md | 13 +++++--- .../elasticsearch_security_api_key.md | 13 +++----- docs/resources/elasticsearch_security_role.md | 11 ++++--- .../elasticsearch_security_role_mapping.md | 9 ++++-- .../elasticsearch_security_system_user.md | 11 ++++--- docs/resources/elasticsearch_security_user.md | 11 ++++--- .../elasticsearch_snapshot_lifecycle.md | 11 ++++--- .../elasticsearch_snapshot_repository.md | 11 ++++--- docs/resources/elasticsearch_transform.md | 16 ++++++---- docs/resources/elasticsearch_watch.md | 15 +++++---- docs/resources/fleet_agent_policy.md | 13 +++++--- docs/resources/fleet_integration.md | 13 +++++--- docs/resources/fleet_integration_policy.md | 16 ++++++++-- docs/resources/fleet_output.md | 13 +++++--- docs/resources/fleet_server_host.md | 13 +++++--- docs/resources/kibana_action_connector.md | 13 +++++--- docs/resources/kibana_alerting_rule.md | 29 ++++++++++------- docs/resources/kibana_data_view.md | 13 +++++--- docs/resources/kibana_import_saved_objects.md | 13 +++----- docs/resources/kibana_maintenance_window.md | 11 ++++--- docs/resources/kibana_security_role.md | 12 ++++--- docs/resources/kibana_slo.md | 11 ++++--- docs/resources/kibana_space.md | 13 +++++--- docs/resources/kibana_synthetics_monitor.md | 24 +++++++++----- docs/resources/kibana_synthetics_parameter.md | 14 ++++++--- .../kibana_synthetics_private_location.md | 14 ++++++--- 89 files changed, 673 insertions(+), 596 deletions(-) diff --git 
a/docs/data-sources/elasticsearch_enrich_policy.md b/docs/data-sources/elasticsearch_enrich_policy.md index 9c286ae92..f5b18fd4e 100644 --- a/docs/data-sources/elasticsearch_enrich_policy.md +++ b/docs/data-sources/elasticsearch_enrich_policy.md @@ -1,11 +1,13 @@ + --- -subcategory: "Enrich" +# generated by https://github.com/hashicorp/terraform-plugin-docs page_title: "elasticstack_elasticsearch_enrich_policy Data Source - terraform-provider-elasticstack" +subcategory: "Enrich" description: |- Returns information about an enrich policy. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html --- -# Data Source: elasticstack_elasticsearch_enrich_policy +# elasticstack_elasticsearch_enrich_policy (Data Source) Returns information about an enrich policy. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html diff --git a/docs/data-sources/elasticsearch_index_template.md b/docs/data-sources/elasticsearch_index_template.md index 9beebbb90..da0da4488 100644 --- a/docs/data-sources/elasticsearch_index_template.md +++ b/docs/data-sources/elasticsearch_index_template.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_index_template Data Source - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_index_template Data Source" description: |- - Retrieves index template. + Retrieves information about an existing index template definition. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template.html --- -# Data Source: elasticstack_elasticsearch_index_template +# elasticstack_elasticsearch_index_template (Data Source) -Use this data source to retrieve information about existing Elasticsearch index templates. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template.html +Retrieves information about an existing index template definition. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_indices.md b/docs/data-sources/elasticsearch_indices.md index 36ad08c9d..1a0fdb5f4 100644 --- a/docs/data-sources/elasticsearch_indices.md +++ b/docs/data-sources/elasticsearch_indices.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_indices Data Source - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_indices Data Source" description: |- - Retrieves indices. + Retrieves information about existing Elasticsearch indices. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html --- -# Data Source: elasticstack_elasticsearch_indices +# elasticstack_elasticsearch_indices (Data Source) -Use this data source to retrieve and get information about existing Elasticsearch indices. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html +Retrieves information about existing Elasticsearch indices. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_info.md b/docs/data-sources/elasticsearch_info.md index 2ea14d1d8..1545d9c1b 100644 --- a/docs/data-sources/elasticsearch_info.md +++ b/docs/data-sources/elasticsearch_info.md @@ -1,14 +1,15 @@ + --- -subcategory: "Cluster" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_info Data Source" +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_info Data Source - terraform-provider-elasticstack" +subcategory: "Elasticsearch" description: |- - Gets information about the Elasticsearch cluster. + Gets information about the Elastic cluster. --- -# Data Source: elasticstack_elasticsearch_info +# elasticstack_elasticsearch_info (Data Source) -This data source provides the information about the configured Elasticsearch cluster +Gets information about the Elastic cluster. ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_append.md b/docs/data-sources/elasticsearch_ingest_processor_append.md index f33f66eb2..9cf501ed8 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_append.md +++ b/docs/data-sources/elasticsearch_ingest_processor_append.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_append Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_append Data Source" description: |- - Helper data source to create a processor which appends one or more values to an existing array if the field already exists and it is an array. + Helper data source which can be used to create the configuration for an append processor. This processor appends one or more values to an existing array if the field already exists and it is an array. Converts a scalar to an array and appends one or more values to it if the field exists and it is a scalar. Creates an array containing the provided values if the field doesn’t exist. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/append-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_append - -Helper data source to which can be used to create a processor to append one or more values to an existing array if the field already exists and it is an array. -Converts a scalar to an array and appends one or more values to it if the field exists and it is a scalar. Creates an array containing the provided values if the field doesn’t exist. +# elasticstack_elasticsearch_ingest_processor_append (Data Source) -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/append-processor.html +Helper data source which can be used to create the configuration for an append processor. This processor appends one or more values to an existing array if the field already exists and it is an array. Converts a scalar to an array and appends one or more values to it if the field exists and it is a scalar. Creates an array containing the provided values if the field doesn’t exist. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/append-processor.html ## Example Usage @@ -56,4 +54,3 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { - `id` (String) Internal identifier of the resource - `json` (String) JSON representation of this data source. - diff --git a/docs/data-sources/elasticsearch_ingest_processor_bytes.md b/docs/data-sources/elasticsearch_ingest_processor_bytes.md index 692f8b225..c9f311f0a 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_bytes.md +++ b/docs/data-sources/elasticsearch_ingest_processor_bytes.md @@ -1,18 +1,21 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_bytes Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_bytes Data Source" description: |- - Helper data source to create a processor which converts a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). + Helper data source which can be used to create the configuration for a bytes processor. The processor converts a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). See: https://www.elastic.co/guide/en/elasticsearch/reference/current/bytes-processor.html + If the field is an array of strings, all members of the array will be converted. + Supported human readable units are "b", "kb", "mb", "gb", "tb", "pb" case insensitive. An error will occur if the field is not a supported format or resultant value exceeds 2^63. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_bytes +# elasticstack_elasticsearch_ingest_processor_bytes (Data Source) -Helper data source to which can be used to create a processor to convert a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). If the field is an array of strings, all members of the array will be converted. +Helper data source which can be used to create the configuration for a bytes processor. The processor converts a human readable byte value (e.g. 1kb) to its value in bytes (e.g. 1024). See: https://www.elastic.co/guide/en/elasticsearch/reference/current/bytes-processor.html -Supported human readable units are "b", "kb", "mb", "gb", "tb", "pb" case insensitive. An error will occur if the field is not a supported format or resultant value exceeds 2^63. +If the field is an array of strings, all members of the array will be converted. -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/bytes-processor.html +Supported human readable units are "b", "kb", "mb", "gb", "tb", "pb" case insensitive. An error will occur if the field is not a supported format or resultant value exceeds 2^63. ## Example Usage @@ -55,4 +58,3 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { - `id` (String) Internal identifier of the resource - `json` (String) JSON representation of this data source. 
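
As a hedged illustration of how the `json` output above is typically wired into a pipeline (the `field` argument and the pipeline's `processors` attribute are assumed from the provider's other processor data sources, not from this page):

```terraform
data "elasticstack_elasticsearch_ingest_processor_bytes" "file_size" {
  # A human readable value such as "1kb" in this field becomes 1024.
  field = "file.size"
}

resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" {
  name = "bytes-example"
  processors = [
    data.elasticstack_elasticsearch_ingest_processor_bytes.file_size.json,
  ]
}
```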
- diff --git a/docs/data-sources/elasticsearch_ingest_processor_circle.md b/docs/data-sources/elasticsearch_ingest_processor_circle.md index a526a56f5..b5784a6d1 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_circle.md +++ b/docs/data-sources/elasticsearch_ingest_processor_circle.md @@ -1,16 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_circle Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_circle Data Source" description: |- - Helper data source to create a processor which converts circle definitions of shapes to regular polygons which approximate them. + Helper data source which can be used to create the configuration for an circle processor. This processor converts circle definitions of shapes to regular polygons which approximate them. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-circle-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_circle - -Helper data source to which can be used to create a processor to convert circle definitions of shapes to regular polygons which approximate them. +# elasticstack_elasticsearch_ingest_processor_circle (Data Source) -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-circle-processor.html +Helper data source which can be used to create the configuration for an circle processor. This processor converts circle definitions of shapes to regular polygons which approximate them. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-circle-processor.html ## Example Usage @@ -57,4 +56,3 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { - `id` (String) Internal identifier of the resource - `json` (String) JSON representation of this data source. - diff --git a/docs/data-sources/elasticsearch_ingest_processor_community_id.md b/docs/data-sources/elasticsearch_ingest_processor_community_id.md index bb9bda3d1..c7376d59c 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_community_id.md +++ b/docs/data-sources/elasticsearch_ingest_processor_community_id.md @@ -1,20 +1,21 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_community_id Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_community_id Data Source" description: |- - Helper data source to create a processor which computes the Community ID for network flow data as defined in the Community ID Specification. + Helper data source which can be used to create the configuration for a community ID processor. This processor computes the Community ID for network flow data as defined in the Community ID Specification. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/community-id-processor.html + You can use a community ID to correlate network events related to a single flow. + The community ID processor reads network flow data from related Elastic Common Schema (ECS) https://www.elastic.co/guide/en/ecs/1.12 fields by default. If you use the ECS, no configuration is required. 
--- -# Data Source: elasticstack_elasticsearch_ingest_processor_community_id +# elasticstack_elasticsearch_ingest_processor_community_id (Data Source) -Helper data source to which can be used to create a processor to compute the Community ID for network flow data as defined in the [Community ID Specification](https://github.com/corelight/community-id-spec). +Helper data source which can be used to create the configuration for a community ID processor. This processor computes the Community ID for network flow data as defined in the Community ID Specification. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/community-id-processor.html You can use a community ID to correlate network events related to a single flow. The community ID processor reads network flow data from related [Elastic Common Schema (ECS)](https://www.elastic.co/guide/en/ecs/1.12) fields by default. If you use the ECS, no configuration is required. -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/community-id-processor.html - ## Example Usage ```terraform @@ -59,4 +60,3 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { - `id` (String) Internal identifier of the resource - `json` (String) JSON representation of this data source. - diff --git a/docs/data-sources/elasticsearch_ingest_processor_convert.md b/docs/data-sources/elasticsearch_ingest_processor_convert.md index 25f6d94be..2778e5db3 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_convert.md +++ b/docs/data-sources/elasticsearch_ingest_processor_convert.md @@ -1,25 +1,37 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_convert Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_convert Data Source" description: |- - Helper data source to create a processor which converts a field in the currently ingested document to a different type, such as converting a string to an integer. + Helper data source which can be used to create the configuration for a convert processor. This processor converts a field in the currently ingested document to a different type, such as converting a string to an integer. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/convert-processor.html + The supported types include: + integerlongfloatdoublestringbooleanipauto + Specifying boolean will set the field to true if its string value is equal to true (ignoring case), to false if its string value is equal to false (ignoring case), or it will throw an exception otherwise. + Specifying ip will set the target field to the value of field if it contains a valid IPv4 or IPv6 address that can be indexed into an IP field type. + Specifying auto will attempt to convert the string-valued field into the closest non-string, non-IP type. For example, a field whose value is "true" will be converted to its respective boolean type: true. Do note that float takes precedence of double in auto. A value of "242.15" will "automatically" be converted to 242.15 of type float. If a provided field cannot be appropriately converted, the processor will still process successfully and leave the field value as-is. In such a case, target_field will be updated with the unconverted field value. 
--- -# Data Source: elasticstack_elasticsearch_ingest_processor_convert +# elasticstack_elasticsearch_ingest_processor_convert (Data Source) -Helper data source to which can be used to convert a field in the currently ingested document to a different type, such as converting a string to an integer. If the field value is an array, all members will be converted. +Helper data source which can be used to create the configuration for a convert processor. This processor converts a field in the currently ingested document to a different type, such as converting a string to an integer. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/convert-processor.html -The supported types include: `integer`, `long`, `float`, `double`, `string`, `boolean`, `ip`, and `auto`. +The supported types include: +- `integer` +- `long` +- `float` +- `double` +- `string` +- `boolean` +- `ip` +- `auto` -Specifying `boolean` will set the field to true if its string value is equal to true (ignore case), to false if its string value is equal to false (ignore case), or it will throw an exception otherwise. +Specifying `boolean` will set the field to true if its string value is equal to true (ignoring case), to false if its string value is equal to false (ignoring case), or it will throw an exception otherwise. Specifying `ip` will set the target field to the value of `field` if it contains a valid IPv4 or IPv6 address that can be indexed into an IP field type. Specifying `auto` will attempt to convert the string-valued `field` into the closest non-string, non-IP type. For example, a field whose value is "true" will be converted to its respective boolean type: true. Do note that float takes precedence of double in auto. A value of "242.15" will "automatically" be converted to 242.15 of type `float`. If a provided field cannot be appropriately converted, the processor will still process successfully and leave the field value as-is. In such a case, `target_field` will be updated with the unconverted field value. -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/convert-processor.html - ## Example Usage ```terraform @@ -64,4 +76,3 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { - `id` (String) Internal identifier of the resource - `json` (String) JSON representation of this data source. - diff --git a/docs/data-sources/elasticsearch_ingest_processor_csv.md b/docs/data-sources/elasticsearch_ingest_processor_csv.md index b1b0aa307..755322b77 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_csv.md +++ b/docs/data-sources/elasticsearch_ingest_processor_csv.md @@ -1,16 +1,18 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_csv Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_csv Data Source" description: |- - Helper data source to create a processor which extracts fields from CSV line out of a single text field within a document. + Helper data source which can be used to create the configuration for a CSV processor. This processor extracts fields from CSV line out of a single text field within a document. Any empty field in CSV will be skipped. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/csv-processor.html + If the trim option is enabled then any whitespace in the beginning and in the end of each unquoted field will be trimmed. 
For example with configuration above, a value of A, B will result in field field2 having value {nbsp}B (with space at the beginning). If trim is enabled A, B will result in field field2 having value B (no whitespace). Quoted fields will be left untouched. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_csv +# elasticstack_elasticsearch_ingest_processor_csv (Data Source) -Helper data source to which can be used to extract fields from CSV line out of a single text field within a document. Any empty field in CSV will be skipped. +Helper data source which can be used to create the configuration for a CSV processor. This processor extracts fields from CSV line out of a single text field within a document. Any empty field in CSV will be skipped. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/csv-processor.html -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/csv-processor.html +If the `trim` option is enabled then any whitespace in the beginning and in the end of each unquoted field will be trimmed. For example with configuration above, a value of A, B will result in field field2 having value {nbsp}B (with space at the beginning). If trim is enabled A, B will result in field field2 having value B (no whitespace). Quoted fields will be left untouched. ## Example Usage @@ -33,8 +35,6 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { } ``` -If the `trim` option is enabled then any whitespace in the beginning and in the end of each unquoted field will be trimmed. For example with configuration above, a value of A, B will result in field field2 having value {nbsp}B (with space at the beginning). If trim is enabled A, B will result in field field2 having value B (no whitespace). Quoted fields will be left untouched. - ## Schema @@ -60,4 +60,3 @@ If the `trim` option is enabled then any whitespace in the beginning and in the - `id` (String) Internal identifier of the resource - `json` (String) JSON representation of this data source. - diff --git a/docs/data-sources/elasticsearch_ingest_processor_date.md b/docs/data-sources/elasticsearch_ingest_processor_date.md index 69d002d8a..83ee9d2ef 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_date.md +++ b/docs/data-sources/elasticsearch_ingest_processor_date.md @@ -1,22 +1,21 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_date Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_date Data Source" description: |- - Helper data source to create a processor which parses dates from fields, and then uses the date or timestamp as the timestamp for the document. + Helper data source which can be used to create the configuration for a date processor. This processor parses dates from fields, and then uses the date or timestamp as the timestamp for the document. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-processor.html + By default, the date processor adds the parsed date as a new field called @timestamp. You can specify a different field by setting the target_field configuration parameter. Multiple date formats are supported as part of the same date processor definition. They will be used sequentially to attempt parsing the date field, in the same order they were defined as part of the processor definition. 
--- -# Data Source: elasticstack_elasticsearch_ingest_processor_date +# elasticstack_elasticsearch_ingest_processor_date (Data Source) -Helper data source to which can be used to parse dates from fields, and then uses the date or timestamp as the timestamp for the document. -By default, the date processor adds the parsed date as a new field called `@timestamp`. You can specify a different field by setting the `target_field` configuration parameter. Multiple date formats are supported as part of the same date processor definition. They will be used sequentially to attempt parsing the date field, in the same order they were defined as part of the processor definition. +Helper data source which can be used to create the configuration for a date processor. This processor parses dates from fields, and then uses the date or timestamp as the timestamp for the document. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-processor.html -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-processor.html +By default, the date processor adds the parsed date as a new field called `@timestamp`. You can specify a different field by setting the `target_field` configuration parameter. Multiple date formats are supported as part of the same date processor definition. They will be used sequentially to attempt parsing the date field, in the same order they were defined as part of the processor definition. ## Example Usage -Here is an example that adds the parsed date to the `timestamp` field based on the `initial_date` field: - ```terraform provider "elasticstack" { elasticsearch {} diff --git a/docs/data-sources/elasticsearch_ingest_processor_date_index_name.md b/docs/data-sources/elasticsearch_ingest_processor_date_index_name.md index 64580f566..150d868bb 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_date_index_name.md +++ b/docs/data-sources/elasticsearch_ingest_processor_date_index_name.md @@ -1,21 +1,22 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_date_index_name Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_date_index_name Data Source" description: |- - Helper data source to create a processor which helps to point documents to the right time based index based on a date or timestamp field in a document by using the date math index name support. + Helper data source which can be used to create the configuration for a date index name processor. The purpose of this processor is to point documents to the right time based index based on a date or timestamp field in a document by using the date math index name support. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-index-name-processor.html + The processor sets the _index metadata field with a date math index name expression based on the provided index name prefix, a date or timestamp field in the documents being processed and the provided date rounding. + First, this processor fetches the date or timestamp from a field in the document being processed. Optionally, date formatting can be configured on how the field’s value should be parsed into a date. Then this date, the provided index name prefix and the provided date rounding get formatted into a date math index name expression. 
Also here optionally date formatting can be specified on how the date should be formatted into a date math index name expression. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_date_index_name +# elasticstack_elasticsearch_ingest_processor_date_index_name (Data Source) -The purpose of this processor is to point documents to the right time based index based on a date or timestamp field in a document by using the date math index name support. +Helper data source which can be used to create the configuration for a date index name processor. The purpose of this processor is to point documents to the right time based index based on a date or timestamp field in a document by using the date math index name support. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-index-name-processor.html The processor sets the _index metadata field with a date math index name expression based on the provided index name prefix, a date or timestamp field in the documents being processed and the provided date rounding. First, this processor fetches the date or timestamp from a field in the document being processed. Optionally, date formatting can be configured on how the field’s value should be parsed into a date. Then this date, the provided index name prefix and the provided date rounding get formatted into a date math index name expression. Also here optionally date formatting can be specified on how the date should be formatted into a date math index name expression. -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/date-index-name-processor.html - ## Example Usage ```terraform diff --git a/docs/data-sources/elasticsearch_ingest_processor_dissect.md b/docs/data-sources/elasticsearch_ingest_processor_dissect.md index 4f8bf46eb..58afc68eb 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_dissect.md +++ b/docs/data-sources/elasticsearch_ingest_processor_dissect.md @@ -1,20 +1,22 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_dissect Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_dissect Data Source" description: |- - Helper data source to create a processor which extracts structured fields out of a single text field within a document. + Helper data source which can be used to create the configuration for a dissect processor. This processor extracts structured fields out of a single text field within a document. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/dissect-processor.html#dissect-processor + Similar to the Grok Processor, dissect also extracts structured fields out of a single text field within a document. However unlike the Grok Processor, dissect does not use Regular Expressions. This allows dissect’s syntax to be simple and for some cases faster than the Grok Processor. + Dissect matches a single text field against a defined pattern. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_dissect +# elasticstack_elasticsearch_ingest_processor_dissect (Data Source) + +Helper data source which can be used to create the configuration for a dissect processor. This processor extracts structured fields out of a single text field within a document. 
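As a rough illustration of the dissect behaviour just described, the sketch below matches an access-log style line against a dissect pattern and feeds the resulting processor JSON into a pipeline. The `field` and `pattern` attribute names are assumptions based on the processor options and may differ from the actual schema.

```terraform
provider "elasticstack" {
  elasticsearch {}
}

# Dissect a log line into structured fields without regular expressions.
# `%%{...}` is the HCL escape for a literal `%{...}` dissect key.
data "elasticstack_elasticsearch_ingest_processor_dissect" "log_line" {
  field   = "message"
  pattern = "%%{clientip} %%{ident} %%{auth} [%%{@timestamp}] \"%%{verb} %%{request} HTTP/%%{httpversion}\" %%{status} %%{size}"
}

resource "elasticstack_elasticsearch_ingest_pipeline" "dissect" {
  name       = "dissect-access-logs"
  processors = [data.elasticstack_elasticsearch_ingest_processor_dissect.log_line.json]
}
```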
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/dissect-processor.html#dissect-processor Similar to the Grok Processor, dissect also extracts structured fields out of a single text field within a document. However unlike the Grok Processor, dissect does not use Regular Expressions. This allows dissect’s syntax to be simple and for some cases faster than the Grok Processor. Dissect matches a single text field against a defined pattern. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/dissect-processor.html - ## Example Usage ```terraform diff --git a/docs/data-sources/elasticsearch_ingest_processor_dot_expander.md b/docs/data-sources/elasticsearch_ingest_processor_dot_expander.md index 6c0743fae..4b2dc92e5 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_dot_expander.md +++ b/docs/data-sources/elasticsearch_ingest_processor_dot_expander.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_dot_expander Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_dot_expander Data Source" description: |- - Helper data source to create a processor which expands a field with dots into an object field. + Helper data source which can be used to create the configuration for a dot expander processor. This processor expands a field with dots into an object field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/dot-expand-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_dot_expander - -Expands a field with dots into an object field. This processor allows fields with dots in the name to be accessible by other processors in the pipeline. Otherwise these fields can’t be accessed by any processor. - -See: elastic.co/guide/en/elasticsearch/reference/current/dot-expand-processor.html +# elasticstack_elasticsearch_ingest_processor_dot_expander (Data Source) +Helper data source which can be used to create the configuration for a dot expander processor. This processor expands a field with dots into an object field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/dot-expand-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_drop.md b/docs/data-sources/elasticsearch_ingest_processor_drop.md index cb7ebd9f8..c677749b7 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_drop.md +++ b/docs/data-sources/elasticsearch_ingest_processor_drop.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_drop Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_drop Data Source" description: |- - Helper data source to create a processor which drops the document without raising any errors. + Helper data source which can be used to create the configuration for a drop processor. This processor drops the document without raising any errors. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/drop-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_drop - -Drops the document without raising any errors. This is useful to prevent the document from getting indexed based on some condition. 
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/drop-processor.html +# elasticstack_elasticsearch_ingest_processor_drop (Data Source) +Helper data source which can be used to create the configuration for a drop processor. This processor drops the document without raising any errors. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/drop-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_enrich.md b/docs/data-sources/elasticsearch_ingest_processor_enrich.md index b1f66e565..4e4c61d14 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_enrich.md +++ b/docs/data-sources/elasticsearch_ingest_processor_enrich.md @@ -1,16 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_enrich Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_enrich Data Source" description: |- - Helper data source to create a processor which enriches documents with data from another index. + Helper data source which can be used to create the configuration for an enrich processor. The enrich processor can enrich documents with data from another index. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_enrich - -The enrich processor can enrich documents with data from another index. See enrich data section for more information about how to set this up. +# elasticstack_elasticsearch_ingest_processor_enrich (Data Source) -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-enriching-data.html and https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-processor.html +Helper data source which can be used to create the configuration for an enrich processor. The enrich processor can enrich documents with data from another index. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_fail.md b/docs/data-sources/elasticsearch_ingest_processor_fail.md index 3ae3b778b..3ebc14892 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_fail.md +++ b/docs/data-sources/elasticsearch_ingest_processor_fail.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_fail Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_fail Data Source" description: |- - Helper data source to create a processor which raises an exception. + Helper data source which can be used to create the configuration for a fail processor. This processor raises an exception. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fail-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_fail - -Raises an exception. This is useful for when you expect a pipeline to fail and want to relay a specific message to the requester. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fail-processor.html +# elasticstack_elasticsearch_ingest_processor_fail (Data Source) +Helper data source which can be used to create the configuration for a fail processor. This processor raises an exception. 
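A minimal sketch of how the fail processor described above might be used, failing the pipeline with a clear message when a condition holds. The `if` and `message` attribute names are assumptions mirroring the common processor options and the fail processor's message option.

```terraform
provider "elasticstack" {
  elasticsearch {}
}

# Reject documents carrying the `production` tag with an explicit error message.
# The Painless condition assumes a `tags` field is present on incoming documents.
data "elasticstack_elasticsearch_ingest_processor_fail" "guard" {
  if      = "ctx.tags.contains('production')"
  message = "Documents tagged production must not be sent to this pipeline"
}

resource "elasticstack_elasticsearch_ingest_pipeline" "fail_guard" {
  name       = "fail-on-production-tag"
  processors = [data.elasticstack_elasticsearch_ingest_processor_fail.guard.json]
}
```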
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fail-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_fingerprint.md b/docs/data-sources/elasticsearch_ingest_processor_fingerprint.md index f852e051f..91d1498ce 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_fingerprint.md +++ b/docs/data-sources/elasticsearch_ingest_processor_fingerprint.md @@ -1,16 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_fingerprint Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_fingerprint Data Source" description: |- - Helper data source to create a processor which computes a hash of the document’s content. + Helper data source which can be used to create the configuration for a fingerprint processor. This processor computes a hash of the document’s content. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fingerprint-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_fingerprint - -Computes a hash of the document’s content. You can use this hash for content fingerprinting. +# elasticstack_elasticsearch_ingest_processor_fingerprint (Data Source) -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fingerprint-processor.html +Helper data source which can be used to create the configuration for a fingerprint processor. This processor computes a hash of the document’s content. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/fingerprint-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_foreach.md b/docs/data-sources/elasticsearch_ingest_processor_foreach.md index a448a7234..eb913da9d 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_foreach.md +++ b/docs/data-sources/elasticsearch_ingest_processor_foreach.md @@ -1,22 +1,26 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_foreach Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_foreach Data Source" description: |- - Helper data source to create a processor which runs an ingest processor on each element of an array or object. + Helper data source which can be used to create the configuration for a foreach processor. This processor runs an ingest processor on each element of an array or object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/foreach-processor.html + All ingest processors can run on array or object elements. However, if the number of elements is unknown, it can be cumbersome to process each one in the same way. + The foreach processor lets you specify a field containing array or object values and a processor to run on each element in the field. + Access keys and values + When iterating through an array or object, the foreach processor stores the current element’s value in the _ingest._value ingest metadata field. _ingest._value contains the entire element value, including any child fields. You can access child field values using dot notation on the _ingest._value field. + When iterating through an object, the foreach processor also stores the current element’s key as a string in _ingest._key. 
+ You can access and change _ingest._key and _ingest._value in the processor. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_foreach +# elasticstack_elasticsearch_ingest_processor_foreach (Data Source) -Runs an ingest processor on each element of an array or object. +Helper data source which can be used to create the configuration for a foreach processor. This processor runs an ingest processor on each element of an array or object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/foreach-processor.html All ingest processors can run on array or object elements. However, if the number of elements is unknown, it can be cumbersome to process each one in the same way. The `foreach` processor lets you specify a `field` containing array or object values and a `processor` to run on each element in the field. -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/foreach-processor.html - - ### Access keys and values When iterating through an array or object, the foreach processor stores the current element’s value in the `_ingest._value` ingest metadata field. `_ingest._value` contains the entire element value, including any child fields. You can access child field values using dot notation on the `_ingest._value` field. @@ -25,8 +29,6 @@ When iterating through an object, the foreach processor also stores the current You can access and change `_ingest._key` and `_ingest._value` in the processor. - - ## Example Usage ```terraform diff --git a/docs/data-sources/elasticsearch_ingest_processor_geoip.md b/docs/data-sources/elasticsearch_ingest_processor_geoip.md index efec6890d..6bd6e84f9 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_geoip.md +++ b/docs/data-sources/elasticsearch_ingest_processor_geoip.md @@ -1,24 +1,24 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_geoip Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_geoip Data Source" description: |- - Helper data source to create a processor which adds information about the geographical location of an IPv4 or IPv6 address. + Helper data source which can be used to create the configuration for a geoip processor. The geoip processor adds information about the geographical location of an IPv4 or IPv6 address. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html + By default, the processor uses the GeoLite2 City, GeoLite2 Country, and GeoLite2 ASN GeoIP2 databases from MaxMind, shared under the CC BY-SA 4.0 license. Elasticsearch automatically downloads updates for these databases from the Elastic GeoIP endpoint: https://geoip.elastic.co/v1/database. To get download statistics for these updates, use the GeoIP stats API. + If your cluster can’t connect to the Elastic GeoIP endpoint or you want to manage your own updates, see Manage your own GeoIP2 database updates https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html#manage-geoip-database-updates. + If Elasticsearch can’t connect to the endpoint for 30 days all updated databases will become invalid. Elasticsearch will stop enriching documents with geoip data and will add tags: ["_geoip_expired_database"] field instead. 
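The sketch below shows one way the geoip data source described above could be wired into a pipeline, enriching documents based on an `ip` field. The `field` and `target_field` attribute names are assumptions based on the processor options; `target_field` is shown only to make the output location explicit.

```terraform
provider "elasticstack" {
  elasticsearch {}
}

# Look up geographical data for the address stored in `ip` and
# write the enrichment under `geoip`.
data "elasticstack_elasticsearch_ingest_processor_geoip" "geo" {
  field        = "ip"
  target_field = "geoip"
}

resource "elasticstack_elasticsearch_ingest_pipeline" "geoip" {
  name       = "geoip-enrich"
  processors = [data.elasticstack_elasticsearch_ingest_processor_geoip.geo.json]
}
```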
--- -# Data Source: elasticstack_elasticsearch_ingest_processor_geoip +# elasticstack_elasticsearch_ingest_processor_geoip (Data Source) -The geoip processor adds information about the geographical location of an IPv4 or IPv6 address. +Helper data source which can be used to create the configuration for a geoip processor. The geoip processor adds information about the geographical location of an IPv4 or IPv6 address. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html By default, the processor uses the GeoLite2 City, GeoLite2 Country, and GeoLite2 ASN GeoIP2 databases from MaxMind, shared under the CC BY-SA 4.0 license. Elasticsearch automatically downloads updates for these databases from the Elastic GeoIP endpoint: https://geoip.elastic.co/v1/database. To get download statistics for these updates, use the GeoIP stats API. If your cluster can’t connect to the Elastic GeoIP endpoint or you want to manage your own updates, [see Manage your own GeoIP2 database updates](https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html#manage-geoip-database-updates). -If Elasticsearch can’t connect to the endpoint for 30 days all updated databases will become invalid. Elasticsearch will stop enriching documents with geoip data and will add tags: ["_geoip_expired_database"] field instead. - - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html - +If Elasticsearch can’t connect to the endpoint for 30 days all updated databases will become invalid. Elasticsearch will stop enriching documents with geoip data and will add `tags: ["_geoip_expired_database"]` field instead. ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_grok.md b/docs/data-sources/elasticsearch_ingest_processor_grok.md index 9a078fc26..4a002a33c 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_grok.md +++ b/docs/data-sources/elasticsearch_ingest_processor_grok.md @@ -1,23 +1,22 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_grok Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_grok Data Source" description: |- - Helper data source to create a processor which extracts structured fields out of a single text field within a document. + Helper data source which can be used to create the configuration for a grok processor. This processor extracts structured fields out of a single text field within a document. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/grok-processor.html + This processor comes packaged with many reusable patterns https://github.com/elastic/elasticsearch/blob/master/libs/grok/src/main/resources/patterns. + If you need help building patterns to match your logs, you will find the Grok Debugger https://www.elastic.co/guide/en/kibana/master/xpack-grokdebugger.html tool quite useful! The Grok Constructor https://grokconstructor.appspot.com/ is also a useful tool. --- -# Data Source: elasticstack_elasticsearch_ingest_processor_grok +# elasticstack_elasticsearch_ingest_processor_grok (Data Source) -Extracts structured fields out of a single text field within a document. You choose which field to extract matched fields from, as well as the grok pattern you expect will match. A grok pattern is like a regular expression that supports aliased expressions that can be reused. 
+Helper data source which can be used to create the configuration for a grok processor. This processor extracts structured fields out of a single text field within a document. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/grok-processor.html This processor comes packaged with many [reusable patterns](https://github.com/elastic/elasticsearch/blob/master/libs/grok/src/main/resources/patterns). If you need help building patterns to match your logs, you will find the [Grok Debugger](https://www.elastic.co/guide/en/kibana/master/xpack-grokdebugger.html) tool quite useful! [The Grok Constructor](https://grokconstructor.appspot.com/) is also a useful tool. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/grok-processor.html - - ## Example Usage ```terraform diff --git a/docs/data-sources/elasticsearch_ingest_processor_gsub.md b/docs/data-sources/elasticsearch_ingest_processor_gsub.md index 3798599c7..e75bb2f4a 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_gsub.md +++ b/docs/data-sources/elasticsearch_ingest_processor_gsub.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_gsub Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_gsub Data Source" description: |- - Helper data source to create a processor which converts a string field by applying a regular expression and a replacement. + Helper data source which can be used to create the configuration for a gsub processor. This processor converts a string field by applying a regular expression and a replacement. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/gsub-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_gsub - -Converts a string field by applying a regular expression and a replacement. If the field is an array of string, all members of the array will be converted. If any non-string values are encountered, the processor will throw an exception. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/gsub-processor.html +# elasticstack_elasticsearch_ingest_processor_gsub (Data Source) +Helper data source which can be used to create the configuration for a gsub processor. This processor converts a string field by applying a regular expression and a replacement. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/gsub-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_html_strip.md b/docs/data-sources/elasticsearch_ingest_processor_html_strip.md index ba34acda0..171b76ed8 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_html_strip.md +++ b/docs/data-sources/elasticsearch_ingest_processor_html_strip.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_html_strip Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_html_strip Data Source" description: |- - Helper data source to create a processor which removes HTML tags from the field. + Helper data source which can be used to create the configuration for an HTML strip processor. This processor removes HTML tags from the field. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/htmlstrip-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_html_strip - -Removes HTML tags from the field. If the field is an array of strings, HTML tags will be removed from all members of the array. - -See: templates/data-sources/elasticsearch_ingest_processor_html_strip.md.tmpl +# elasticstack_elasticsearch_ingest_processor_html_strip (Data Source) +Helper data source which can be used to create the configuration for an HTML strip processor. This processor removes HTML tags from the field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/htmlstrip-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_join.md b/docs/data-sources/elasticsearch_ingest_processor_join.md index 866178a67..a46139936 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_join.md +++ b/docs/data-sources/elasticsearch_ingest_processor_join.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_join Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_join Data Source" description: |- - Helper data source to create a processor which joins each element of an array into a single string using a separator character between each element. + Helper data source which can be used to create the configuration for a join processor. This processor joins each element of an array into a single string using a separator character between each element. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/join-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_join - -Joins each element of an array into a single string using a separator character between each element. Throws an error when the field is not an array. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/join-processor.html +# elasticstack_elasticsearch_ingest_processor_join (Data Source) +Helper data source which can be used to create the configuration for a join processor. This processor joins each element of an array into a single string using a separator character between each element. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/join-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_json.md b/docs/data-sources/elasticsearch_ingest_processor_json.md index f7b3d3c5a..e2a1e8989 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_json.md +++ b/docs/data-sources/elasticsearch_ingest_processor_json.md @@ -1,16 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_json Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_json Data Source" description: |- - Helper data source to create a processor which converts a JSON string into a structured JSON object. + Helper data source which can be used to create the configuration for a JSON processor. This processor converts a JSON string into a structured JSON object. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/json-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_json - -Converts a JSON string into a structured JSON object. +# elasticstack_elasticsearch_ingest_processor_json (Data Source) -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/json-processor.html +Helper data source which can be used to create the configuration for a JSON processor. This processor converts a JSON string into a structured JSON object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/json-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_kv.md b/docs/data-sources/elasticsearch_ingest_processor_kv.md index 7dc000a0a..2723945ec 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_kv.md +++ b/docs/data-sources/elasticsearch_ingest_processor_kv.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_kv Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_kv Data Source" description: |- - Helper data source to create a processor which helps automatically parse messages (or specific event fields) which are of the `foo=bar` variety. + Helper data source which can be used to create the configuration for a KV processor. This processor helps automatically parse messages (or specific event fields) which are of the foo=bar variety. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/kv-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_kv - -This processor helps automatically parse messages (or specific event fields) which are of the `foo=bar` variety. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/kv-processor.html +# elasticstack_elasticsearch_ingest_processor_kv (Data Source) +Helper data source which can be used to create the configuration for a KV processor. This processor helps automatically parse messages (or specific event fields) which are of the foo=bar variety. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/kv-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_lowercase.md b/docs/data-sources/elasticsearch_ingest_processor_lowercase.md index b8f6c903b..ddc7beb06 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_lowercase.md +++ b/docs/data-sources/elasticsearch_ingest_processor_lowercase.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_lowercase Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_lowercase Data Source" description: |- - Helper data source to create a processor which converts a string to its lowercase equivalent. + Helper data source which can be used to create the configuration for a lowercase processor. This processor converts a string to its lowercase equivalent. If the field is an array of strings, all members of the array will be converted. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/lowercase-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_lowercase - -Converts a string to its lowercase equivalent. 
If the field is an array of strings, all members of the array will be converted. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/lowercase-processor.html +# elasticstack_elasticsearch_ingest_processor_lowercase (Data Source) +Helper data source which can be used to create the configuration for a lowercase processor. This processor converts a string to its lowercase equivalent. If the field is an array of strings, all members of the array will be converted. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/lowercase-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_network_direction.md b/docs/data-sources/elasticsearch_ingest_processor_network_direction.md index 7ab772a41..956158833 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_network_direction.md +++ b/docs/data-sources/elasticsearch_ingest_processor_network_direction.md @@ -1,21 +1,26 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_network_direction Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_network_direction Data Source" description: |- - Helper data source to create a processor which calculates the network direction given a source IP address, destination IP address, and a list of internal networks. + Helper data source which can be used to create the configuration for a network direction processor. This processor calculates the network direction given a source IP address, destination IP address, and a list of internal networks. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/network-direction-processor.html + The network direction processor reads IP addresses from Elastic Common Schema (ECS) fields by default. If you use the ECS, only the internal_networks option must be specified. + One of either internal_networks or internal_networks_field must be specified. If internal_networks_field is specified, it follows the behavior specified by ignore_missing. + Supported named network ranges + The named ranges supported for the internal_networks option are: + loopback - Matches loopback addresses in the range of 127.0.0.0/8 or ::1/128.unicast or global_unicast - Matches global unicast addresses defined in RFC 1122, RFC 4632, and RFC 4291 with the exception of the IPv4 broadcast address (255.255.255.255). This includes private address ranges.multicast - Matches multicast addresses.interface_local_multicast - Matches IPv6 interface-local multicast addresses.link_local_unicast - Matches link-local unicast addresses.link_local_multicast - Matches link-local multicast addresses.private - Matches private address ranges defined in RFC 1918 (IPv4) and RFC 4193 (IPv6).public - Matches addresses that are not loopback, unspecified, IPv4 broadcast, link local unicast, link local multicast, interface local multicast, or private.unspecified - Matches unspecified addresses (either the IPv4 address "0.0.0.0" or the IPv6 address "::"). --- -# Data Source: elasticstack_elasticsearch_ingest_processor_network_direction +# elasticstack_elasticsearch_ingest_processor_network_direction (Data Source) -Calculates the network direction given a source IP address, destination IP address, and a list of internal networks. +Helper data source which can be used to create the configuration for a network direction processor. 
This processor calculates the network direction given a source IP address, destination IP address, and a list of internal networks. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/network-direction-processor.html The network direction processor reads IP addresses from Elastic Common Schema (ECS) fields by default. If you use the ECS, only the `internal_networks` option must be specified. - One of either `internal_networks` or `internal_networks_field` must be specified. If `internal_networks_field` is specified, it follows the behavior specified by `ignore_missing`. -### Supported named network rangese +### Supported named network ranges The named ranges supported for the internal_networks option are: @@ -29,10 +34,6 @@ The named ranges supported for the internal_networks option are: * `public` - Matches addresses that are not loopback, unspecified, IPv4 broadcast, link local unicast, link local multicast, interface local multicast, or private. * `unspecified` - Matches unspecified addresses (either the IPv4 address "0.0.0.0" or the IPv6 address "::"). - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/network-direction-processor.html - - ## Example Usage ```terraform diff --git a/docs/data-sources/elasticsearch_ingest_processor_pipeline.md b/docs/data-sources/elasticsearch_ingest_processor_pipeline.md index 4374db6ce..8cf90edd5 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_pipeline.md +++ b/docs/data-sources/elasticsearch_ingest_processor_pipeline.md @@ -1,19 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_pipeline Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_pipeline Data Source" description: |- - Helper data source to create a processor which executes another pipeline. + Helper data source which can be used to create the configuration for a pipeline processor. This processor executes another pipeline. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/pipeline-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_pipeline - -Executes another pipeline. - -The name of the current pipeline can be accessed from the `_ingest.pipeline` ingest metadata key. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/pipeline-processor.html +# elasticstack_elasticsearch_ingest_processor_pipeline (Data Source) +Helper data source which can be used to create the configuration for a pipeline processor. This processor executes another pipeline. 
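To illustrate the pipeline processor just described, the sketch below defines an inner pipeline and an outer pipeline that delegates to it by name. The `name` attribute on the pipeline processor data source is an assumption based on the option the processor exposes, and the lowercase data source is used only as a simple inner step.

```terraform
provider "elasticstack" {
  elasticsearch {}
}

# Inner pipeline: a single lowercase step on the `foo` field.
data "elasticstack_elasticsearch_ingest_processor_lowercase" "lower" {
  field = "foo"
}

resource "elasticstack_elasticsearch_ingest_pipeline" "inner" {
  name       = "inner-pipeline"
  processors = [data.elasticstack_elasticsearch_ingest_processor_lowercase.lower.json]
}

# Outer pipeline: executes the inner pipeline via the pipeline processor.
data "elasticstack_elasticsearch_ingest_processor_pipeline" "call_inner" {
  name = elasticstack_elasticsearch_ingest_pipeline.inner.name
}

resource "elasticstack_elasticsearch_ingest_pipeline" "outer" {
  name       = "outer-pipeline"
  processors = [data.elasticstack_elasticsearch_ingest_processor_pipeline.call_inner.json]
}
```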
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/pipeline-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_registered_domain.md b/docs/data-sources/elasticsearch_ingest_processor_registered_domain.md index 215a0be3a..0641c9983 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_registered_domain.md +++ b/docs/data-sources/elasticsearch_ingest_processor_registered_domain.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_registered_domain Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_registered_domain Data Source" description: |- - Helper data source to create a processor which Extracts the registered domain, sub-domain, and top-level domain from a fully qualified domain name. + Helper data source which can be used to create the configuration for a registered domain processor. This processor extracts the registered domain (also known as the effective top-level domain or eTLD), sub-domain, and top-level domain from a fully qualified domain name (FQDN). See: https://www.elastic.co/guide/en/elasticsearch/reference/current/registered-domain-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_registered_domain - -Extracts the registered domain (also known as the effective top-level domain or eTLD), sub-domain, and top-level domain from a fully qualified domain name (FQDN). Uses the registered domains defined in the Mozilla Public Suffix List. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/registered-domain-processor.html +# elasticstack_elasticsearch_ingest_processor_registered_domain (Data Source) +Helper data source which can be used to create the configuration for a registered domain processor. This processor extracts the registered domain (also known as the effective top-level domain or eTLD), sub-domain, and top-level domain from a fully qualified domain name (FQDN). See: https://www.elastic.co/guide/en/elasticsearch/reference/current/registered-domain-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_remove.md b/docs/data-sources/elasticsearch_ingest_processor_remove.md index 5a5a1984c..b4d5ef6f7 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_remove.md +++ b/docs/data-sources/elasticsearch_ingest_processor_remove.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_remove Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_remove Data Source" description: |- - Helper data source to create a processor which removes existing fields. + Helper data source which can be used to create the configuration for a remove processor. This processor removes existing fields. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/remove-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_remove - -Removes existing fields. If one field doesn’t exist, an exception will be thrown. 
- -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/remove-processor.html +# elasticstack_elasticsearch_ingest_processor_remove (Data Source) +Helper data source which can be used to create the configuration for a remove processor. This processor removes existing fields. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/remove-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_rename.md b/docs/data-sources/elasticsearch_ingest_processor_rename.md index f1268b4f5..91655c4f2 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_rename.md +++ b/docs/data-sources/elasticsearch_ingest_processor_rename.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_rename Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_rename Data Source" description: |- - Helper data source to create a processor which renames an existing field. + Helper data source which can be used to create the configuration for a rename processor. This processor renames an existing field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/rename-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_rename - -Renames an existing field. If the field doesn’t exist or the new name is already used, an exception will be thrown. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/rename-processor.html +# elasticstack_elasticsearch_ingest_processor_rename (Data Source) +Helper data source which can be used to create the configuration for a rename processor. This processor renames an existing field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/rename-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_reroute.md b/docs/data-sources/elasticsearch_ingest_processor_reroute.md index bcb5a9bb0..32b763744 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_reroute.md +++ b/docs/data-sources/elasticsearch_ingest_processor_reroute.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_reroute Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_reroute Data Source" description: |- - Helper data source to create a processor which reroutes a document to a different data stream, index, or index alias. + Helper data source which can be used to create the configuration for a reroute processor. This processor reroutes a document to a different data stream, index, or index alias. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/reroute-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_reroute - -Reroutes a document to a different data stream, index, or index alias. This processor is useful for routing documents based on data stream routing rules. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/reroute-processor.html +# elasticstack_elasticsearch_ingest_processor_reroute (Data Source) +Helper data source which can be used to create the configuration for a reroute processor. This processor reroutes a document to a different data stream, index, or index alias. 
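A minimal sketch of the reroute behaviour described above, sending matching documents to a different data stream. The `destination` attribute name is an assumption taken from the reroute processor options and may not match the actual schema.

```terraform
provider "elasticstack" {
  elasticsearch {}
}

# Redirect documents to the logs-generic-default data stream.
data "elasticstack_elasticsearch_ingest_processor_reroute" "to_generic" {
  destination = "logs-generic-default"
}

resource "elasticstack_elasticsearch_ingest_pipeline" "reroute" {
  name       = "reroute-to-generic"
  processors = [data.elasticstack_elasticsearch_ingest_processor_reroute.to_generic.json]
}
```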
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/reroute-processor.html ## Example Usage @@ -52,4 +50,4 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { ### Read-Only - `id` (String) Internal identifier of the resource. -- `json` (String) JSON representation of this data source. \ No newline at end of file +- `json` (String) JSON representation of this data source. diff --git a/docs/data-sources/elasticsearch_ingest_processor_script.md b/docs/data-sources/elasticsearch_ingest_processor_script.md index b52d3cb41..fda8e951d 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_script.md +++ b/docs/data-sources/elasticsearch_ingest_processor_script.md @@ -1,27 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_script Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_script Data Source" description: |- - Helper data source to create a processor which runs an inline or stored script on incoming documents. + Helper data source which can be used to create the configuration for a script processor. This processor runs an inline or stored script on incoming documents. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/script-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_script - -Runs an inline or stored script on incoming documents. The script runs in the ingest context. - -The script processor uses the script cache to avoid recompiling the script for each incoming document. To improve performance, ensure the script cache is properly sized before using a script processor in production. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/script-processor.html - -### Access source fields - -The script processor parses each incoming document’s JSON source fields into a set of maps, lists, and primitives. To access these fields with a Painless script, use the map access operator: `ctx['my-field']`. You can also use the shorthand `ctx.` syntax. - -### Access metadata fields - -You can also use a script processor to access metadata fields. +# elasticstack_elasticsearch_ingest_processor_script (Data Source) +Helper data source which can be used to create the configuration for a script processor. This processor runs an inline or stored script on incoming documents. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/script-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_set.md b/docs/data-sources/elasticsearch_ingest_processor_set.md index 6eeab00e1..bf6b65838 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_set.md +++ b/docs/data-sources/elasticsearch_ingest_processor_set.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_set Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_set Data Source" description: |- - Helper data source to create a processor which sets one field and associates it with the specified value. + Helper data source which can be used to create the configuration for a set processor. This processor sets one field and associates it with the specified value. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/set-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_set - -Sets one field and associates it with the specified value. If the field already exists, its value will be replaced with the provided one. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/set-processor.html +# elasticstack_elasticsearch_ingest_processor_set (Data Source) +Helper data source which can be used to create the configuration for a set processor. This processor sets one field and associates it with the specified value. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/set-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_set_security_user.md b/docs/data-sources/elasticsearch_ingest_processor_set_security_user.md index 62249a9f0..2586e6470 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_set_security_user.md +++ b/docs/data-sources/elasticsearch_ingest_processor_set_security_user.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_set_security_user Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_set_security_user Data Source" description: |- - Helper data source to create a processor which sets user-related details from the current authenticated user to the current document by pre-processing the ingest. + Helper data source which can be used to create the configuration for a set security user processor. This processor sets user-related details (such as username, roles, email, full_name, metadata, api_key, realm and authentication_type) from the current authenticated user to the current document by pre-processing the ingest. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-node-set-security-user-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_set_security_user - -Sets user-related details (such as `username`, `roles`, `email`, `full_name`, `metadata`, `api_key`, `realm` and `authentication_typ`e) from the current authenticated user to the current document by pre-processing the ingest. The `api_key` property exists only if the user authenticates with an API key. It is an object containing the id, name and metadata (if it exists and is non-empty) fields of the API key. The realm property is also an object with two fields, name and type. When using API key authentication, the realm property refers to the realm from which the API key is created. The `authentication_type property` is a string that can take value from `REALM`, `API_KEY`, `TOKEN` and `ANONYMOUS`. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-node-set-security-user-processor.html +# elasticstack_elasticsearch_ingest_processor_set_security_user (Data Source) +Helper data source which can be used to create the configuration for a set security user processor. This processor sets user-related details (such as username, roles, email, full_name, metadata, api_key, realm and authentication_type) from the current authenticated user to the current document by pre-processing the ingest. 
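As a rough sketch of the set security user processor described above, the example below records the authenticated user's username and roles on each ingested document. The `field` and `properties` attribute names are assumptions mirroring the processor options.

```terraform
provider "elasticstack" {
  elasticsearch {}
}

# Store the current authenticated user's username and roles under `user`.
data "elasticstack_elasticsearch_ingest_processor_set_security_user" "user" {
  field      = "user"
  properties = ["username", "roles"]
}

resource "elasticstack_elasticsearch_ingest_pipeline" "audit" {
  name       = "set-security-user"
  processors = [data.elasticstack_elasticsearch_ingest_processor_set_security_user.user.json]
}
```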
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-node-set-security-user-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_sort.md b/docs/data-sources/elasticsearch_ingest_processor_sort.md index c4c240503..a565104b4 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_sort.md +++ b/docs/data-sources/elasticsearch_ingest_processor_sort.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_sort Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_sort Data Source" description: |- - Helper data source to create a processor which sorts the elements of an array ascending or descending. + Helper data source which can be used to create the configuration for a sort processor. This processor sorts the elements of an array ascending or descending. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/sort-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_sort - -Sorts the elements of an array ascending or descending. Homogeneous arrays of numbers will be sorted numerically, while arrays of strings or heterogeneous arrays of strings + numbers will be sorted lexicographically. Throws an error when the field is not an array. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/sort-processor.html +# elasticstack_elasticsearch_ingest_processor_sort (Data Source) +Helper data source which can be used to create the configuration for a sort processor. This processor sorts the elements of an array ascending or descending. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/sort-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_split.md b/docs/data-sources/elasticsearch_ingest_processor_split.md index d8f318509..88a9b6e1e 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_split.md +++ b/docs/data-sources/elasticsearch_ingest_processor_split.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_split Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_split Data Source" description: |- - Helper data source to create a processor which splits a field into an array using a separator character. + Helper data source which can be used to create the configuration for a split processor. This processor splits a field into an array using a separator character. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/split-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_split - -Splits a field into an array using a separator character. Only works on string fields. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/split-processor.html +# elasticstack_elasticsearch_ingest_processor_split (Data Source) +Helper data source which can be used to create the configuration for a split processor. This processor splits a field into an array using a separator character. 
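A minimal sketch of the split processor described above, turning a comma separated string field into an array. The `field` and `separator` attribute names are assumptions based on the processor options.

```terraform
provider "elasticstack" {
  elasticsearch {}
}

# Split the string in `tags` on commas, producing an array of tags.
data "elasticstack_elasticsearch_ingest_processor_split" "tags" {
  field     = "tags"
  separator = ","
}

resource "elasticstack_elasticsearch_ingest_pipeline" "split_tags" {
  name       = "split-tags"
  processors = [data.elasticstack_elasticsearch_ingest_processor_split.tags.json]
}
```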
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/split-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_trim.md b/docs/data-sources/elasticsearch_ingest_processor_trim.md index 4f230cff9..224fe5b99 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_trim.md +++ b/docs/data-sources/elasticsearch_ingest_processor_trim.md @@ -1,19 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_trim Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_trim Data Source" description: |- - Helper data source to create a processor which trims whitespace from field. + Helper data source which can be used to create the configuration for a trim processor. This processor trims whitespace from field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/trim-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_trim - -Trims whitespace from field. If the field is an array of strings, all members of the array will be trimmed. - -**NOTE:** This only works on leading and trailing whitespace. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/trim-processor.html +# elasticstack_elasticsearch_ingest_processor_trim (Data Source) +Helper data source which can be used to create the configuration for a trim processor. This processor trims whitespace from field. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/trim-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_uppercase.md b/docs/data-sources/elasticsearch_ingest_processor_uppercase.md index 6954ed14c..2ddda4d33 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_uppercase.md +++ b/docs/data-sources/elasticsearch_ingest_processor_uppercase.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_uppercase Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_uppercase Data Source" description: |- - Helper data source to create a processor which converts a string to its uppercase equivalent. + Helper data source which can be used to create the configuration for an uppercase processor. This processor converts a string to its uppercase equivalent. If the field is an array of strings, all members of the array will be converted. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uppercase-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_uppercase - -Converts a string to its uppercase equivalent. If the field is an array of strings, all members of the array will be converted. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uppercase-processor.html +# elasticstack_elasticsearch_ingest_processor_uppercase (Data Source) +Helper data source which can be used to create the configuration for an uppercase processor. This processor converts a string to its uppercase equivalent. If the field is an array of strings, all members of the array will be converted. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uppercase-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_uri_parts.md b/docs/data-sources/elasticsearch_ingest_processor_uri_parts.md index 5867f8baf..2e6db8a25 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_uri_parts.md +++ b/docs/data-sources/elasticsearch_ingest_processor_uri_parts.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_uri_parts Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_uri_parts Data Source" description: |- - Helper data source to create a processor which parses a Uniform Resource Identifier (URI) string and extracts its components as an object. + Helper data source which can be used to create the configuration for a URI parts processor. This processor parses a Uniform Resource Identifier (URI) string and extracts its components as an object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uri-parts-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_uri_parts - -Parses a Uniform Resource Identifier (URI) string and extracts its components as an object. This URI object includes properties for the URI’s domain, path, fragment, port, query, scheme, user info, username, and password. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uri-parts-processor.html +# elasticstack_elasticsearch_ingest_processor_uri_parts (Data Source) +Helper data source which can be used to create the configuration for a URI parts processor. This processor parses a Uniform Resource Identifier (URI) string and extracts its components as an object. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/uri-parts-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_urldecode.md b/docs/data-sources/elasticsearch_ingest_processor_urldecode.md index e8dae0d43..6f8265555 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_urldecode.md +++ b/docs/data-sources/elasticsearch_ingest_processor_urldecode.md @@ -1,17 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_urldecode Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_urldecode Data Source" description: |- - Helper data source to create a processor which URL-decodes a string. + Helper data source which can be used to create the configuration for a URL-decode processor. This processor URL-decodes a string. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/urldecode-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_urldecode - -URL-decodes a string. If the field is an array of strings, all members of the array will be decoded. - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/urldecode-processor.html +# elasticstack_elasticsearch_ingest_processor_urldecode (Data Source) +Helper data source which can be used to create the configuration for a URL-decode processor. This processor URL-decodes a string. 
See: https://www.elastic.co/guide/en/elasticsearch/reference/current/urldecode-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_ingest_processor_user_agent.md b/docs/data-sources/elasticsearch_ingest_processor_user_agent.md index 1c728515b..3d07503f0 100644 --- a/docs/data-sources/elasticsearch_ingest_processor_user_agent.md +++ b/docs/data-sources/elasticsearch_ingest_processor_user_agent.md @@ -1,20 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_processor_user_agent Data Source - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_processor_user_agent Data Source" description: |- - Helper data source to create a processor which extracts details from the user agent string a browser sends with its web requests. + Helper data source which can be used to create the configuration for a user agent processor. This processor extracts details from the user agent string a browser sends with its web requests. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/user-agent-processor.html --- -# Data Source: elasticstack_elasticsearch_ingest_processor_user_agent - -The `user_agent` processor extracts details from the user agent string a browser sends with its web requests. This processor adds this information by default under the `user_agent` field. - -The ingest-user-agent module ships by default with the regexes.yaml made available by uap-java with an Apache 2.0 license. For more details see https://github.com/ua-parser/uap-core. - - -See: https://www.elastic.co/guide/en/elasticsearch/reference/current/user-agent-processor.html +# elasticstack_elasticsearch_ingest_processor_user_agent (Data Source) +Helper data source which can be used to create the configuration for a user agent processor. This processor extracts details from the user agent string a browser sends with its web requests. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/user-agent-processor.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_security_role.md b/docs/data-sources/elasticsearch_security_role.md index f41c364c0..4f50c343b 100644 --- a/docs/data-sources/elasticsearch_security_role.md +++ b/docs/data-sources/elasticsearch_security_role.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_security_role Data Source - terraform-provider-elasticstack" subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_role Data Source" description: |- - Retrieves roles in the native realm. + Retrieves roles in the native realm. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html --- -# Data Source: elasticstack_elasticsearch_security_role +# elasticstack_elasticsearch_security_role (Data Source) -Use this data source to get information about an existing Elasticsearch role. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html +Retrieves roles in the native realm. 
See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_security_role_mapping.md b/docs/data-sources/elasticsearch_security_role_mapping.md index 01c6ca5ab..83eef6d35 100644 --- a/docs/data-sources/elasticsearch_security_role_mapping.md +++ b/docs/data-sources/elasticsearch_security_role_mapping.md @@ -1,12 +1,13 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_security_role_mapping Data Source - terraform-provider-elasticstack" subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_role_mapping Data Source" description: |- - Retrieves role mappings. + Retrieves role mappings. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html --- -# Data Source: elasticstack_elasticsearch_security_role_mapping +# elasticstack_elasticsearch_security_role_mapping (Data Source) Retrieves role mappings. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html diff --git a/docs/data-sources/elasticsearch_security_user.md b/docs/data-sources/elasticsearch_security_user.md index 6fc2c049c..480037be0 100644 --- a/docs/data-sources/elasticsearch_security_user.md +++ b/docs/data-sources/elasticsearch_security_user.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_security_user Data Source - terraform-provider-elasticstack" subcategory: "Security" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_security_user Data Source" description: |- - Gets information about Elasticsearch user. + Get the information about the user in the ES cluster. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user.html --- -# Data Source: elasticstack_elasticsearch_security_user +# elasticstack_elasticsearch_security_user (Data Source) -Use this data source to get information about existing Elasticsearch user. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user.html". +Get the information about the user in the ES cluster. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user.html ## Example Usage diff --git a/docs/data-sources/elasticsearch_snapshot_repository.md b/docs/data-sources/elasticsearch_snapshot_repository.md index 7004d0ef4..9987b6e1e 100644 --- a/docs/data-sources/elasticsearch_snapshot_repository.md +++ b/docs/data-sources/elasticsearch_snapshot_repository.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_snapshot_repository Data Source - terraform-provider-elasticstack" subcategory: "Snapshot" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_snapshot_repository Data Source" description: |- Gets information about the registered snapshot repositories. --- -# Data Source: elasticstack_elasticsearch_snapshot_repository +# elasticstack_elasticsearch_snapshot_repository (Data Source) -This data source provides the information about the registered snaphosts repositories +Gets information about the registered snapshot repositories. 
## Example Usage diff --git a/docs/data-sources/fleet_enrollment_tokens.md b/docs/data-sources/fleet_enrollment_tokens.md index 215ba3621..941a24637 100644 --- a/docs/data-sources/fleet_enrollment_tokens.md +++ b/docs/data-sources/fleet_enrollment_tokens.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_fleet_enrollment_tokens Data Source - terraform-provider-elasticstack" subcategory: "Fleet" -layout: "" -page_title: "Elasticstack: elasticstack_fleet_enrollment_tokens Data Source" description: |- - Gets information about Fleet Enrollment Tokens. See https://www.elastic.co/guide/en/fleet/current/fleet-enrollment-tokens.html + Retrieves Elasticsearch API keys used to enroll Elastic Agents in Fleet. See: https://www.elastic.co/guide/en/fleet/current/fleet-enrollment-tokens.html --- -# Data Source: elasticstack_fleet_enrollment_tokens +# elasticstack_fleet_enrollment_tokens (Data Source) -This data source provides information about Fleet Enrollment Tokens. +Retrieves Elasticsearch API keys used to enroll Elastic Agents in Fleet. See: https://www.elastic.co/guide/en/fleet/current/fleet-enrollment-tokens.html ## Example Usage diff --git a/docs/data-sources/fleet_integration.md b/docs/data-sources/fleet_integration.md index 4b329781c..9a9493cf1 100644 --- a/docs/data-sources/fleet_integration.md +++ b/docs/data-sources/fleet_integration.md @@ -1,12 +1,21 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_fleet_integration Data Source - terraform-provider-elasticstack" subcategory: "Fleet" -layout: "" -page_title: "Elasticstack: elasticstack_fleet_integration Data Source" description: |- - Gets information about a Fleet integration package. + This data source provides information about a Fleet integration package. Currently, + the data source will retrieve the latest available version of the package. Version + selection is determined by the Fleet API, which is currently based on semantic + versioning. + By default, the highest GA release version will be selected. If a + package is not GA (the version is below 1.0.0) or if a new non-GA version of the + package is to be selected (i.e., the GA version of the package is 1.5.0, but there's + a new 1.5.1-beta version available), then the prerelease parameter in the plan + should be set to true. --- -# Data Source: elasticstack_fleet_integration +# elasticstack_fleet_integration (Data Source) This data source provides information about a Fleet integration package. Currently, the data source will retrieve the latest available version of the package. Version diff --git a/docs/data-sources/kibana_action_connector.md b/docs/data-sources/kibana_action_connector.md index 5513b5d70..ed5b3b505 100644 --- a/docs/data-sources/kibana_action_connector.md +++ b/docs/data-sources/kibana_action_connector.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_kibana_action_connector Data Source - terraform-provider-elasticstack" subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_action_connector Data Source" description: |- - Retrieve a specific action connector role. See https://www.elastic.co/guide/en/kibana/current/get-all-connectors-api.html. + Search for a connector by name, space id, and type. Note, that this data source will fail if more than one connector shares the same name. 
--- -# Data Source: elasticstack_kibana_action_connector +# elasticstack_kibana_action_connector (Data Source) -Use this data source to get information about an existing action connector. +Search for a connector by name, space id, and type. Note, that this data source will fail if more than one connector shares the same name. ## Example Usage diff --git a/docs/data-sources/kibana_security_role.md b/docs/data-sources/kibana_security_role.md index f2db9f711..6c2511054 100644 --- a/docs/data-sources/kibana_security_role.md +++ b/docs/data-sources/kibana_security_role.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_kibana_security_role Data Source - terraform-provider-elasticstack" subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_security_role Data Source" description: |- - Retrieve a specific Kibana role. See https://www.elastic.co/guide/en/kibana/master/role-management-specific-api-get.html + Retrieve a specific role. See, https://www.elastic.co/guide/en/kibana/current/role-management-specific-api-get.html --- -# Data Source: elasticstack_kibana_security_role +# elasticstack_kibana_security_role (Data Source) -Use this data source to get information about an existing Kibana role. +Retrieve a specific role. See, https://www.elastic.co/guide/en/kibana/current/role-management-specific-api-get.html ## Example Usage diff --git a/docs/data-sources/kibana_spaces.md b/docs/data-sources/kibana_spaces.md index efcfdc282..df1d7fb28 100644 --- a/docs/data-sources/kibana_spaces.md +++ b/docs/data-sources/kibana_spaces.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_kibana_spaces Data Source - terraform-provider-elasticstack" subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_kibana_spaces Data Source" description: |- - Retrieve all Kibana spaces. See https://www.elastic.co/guide/en/kibana/master/spaces-api-get-all.html + Use this data source to retrieve and get information about all existing Kibana spaces. See https://www.elastic.co/guide/en/kibana/master/spaces-api-get-all.html --- -# Data Source: elasticstack_kibana_spaces +# elasticstack_kibana_spaces (Data Source) -Use this data source to retrieve and get information about all existing Kibana spaces. +Use this data source to retrieve and get information about all existing Kibana spaces. See https://www.elastic.co/guide/en/kibana/master/spaces-api-get-all.html ## Example Usage diff --git a/docs/resources/apm_agent_configuration.md b/docs/resources/apm_agent_configuration.md index 8df89ceae..df99f512e 100644 --- a/docs/resources/apm_agent_configuration.md +++ b/docs/resources/apm_agent_configuration.md @@ -1,14 +1,15 @@ + --- -subcategory: "Kibana" -layout: "" -page_title: "Elasticstack: elasticstack_apm_agent_configuration Resource" +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_apm_agent_configuration Resource - terraform-provider-elasticstack" +subcategory: "APM" description: |- - Creates or updates an APM agent configuration + Creates or updates an APM agent configuration. See https://www.elastic.co/docs/solutions/observability/apm/apm-agent-central-configuration. --- -# Resource: elasticstack_apm_agent_configuration +# elasticstack_apm_agent_configuration (Resource) -Creates or updates an APM agent configuration. 
See https://www.elastic.co/docs/solutions/observability/apm/apm-agent-central-configuration +Creates or updates an APM agent configuration. See https://www.elastic.co/docs/solutions/observability/apm/apm-agent-central-configuration. ## Example Usage @@ -49,6 +50,8 @@ resource "elasticstack_apm_agent_configuration" "test_config" { Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_apm_agent_configuration.test_configuration my-service:production ``` diff --git a/docs/resources/elasticsearch_cluster_settings.md b/docs/resources/elasticsearch_cluster_settings.md index bd591363e..a06843f3c 100644 --- a/docs/resources/elasticsearch_cluster_settings.md +++ b/docs/resources/elasticsearch_cluster_settings.md @@ -1,12 +1,13 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_cluster_settings Resource - terraform-provider-elasticstack" subcategory: "Cluster" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_cluster_settings Resource" description: |- - Updates cluster-wide settings. + Updates cluster-wide settings. If the Elasticsearch security features are enabled, you must have the manage cluster privilege to use this API. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html --- -# Resource: elasticstack_elasticsearch_cluster_settings +# elasticstack_elasticsearch_cluster_settings (Resource) Updates cluster-wide settings. If the Elasticsearch security features are enabled, you must have the manage cluster privilege to use this API. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html diff --git a/docs/resources/elasticsearch_component_template.md b/docs/resources/elasticsearch_component_template.md index 441232654..88d53ff09 100644 --- a/docs/resources/elasticsearch_component_template.md +++ b/docs/resources/elasticsearch_component_template.md @@ -1,12 +1,13 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_component_template Resource - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_component_template Resource" description: |- - Creates or updates a component template. + Creates or updates a component template. Component templates are building blocks for constructing index templates that specify index mappings, settings, and aliases. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html --- -# Resource: elasticstack_elasticsearch_component_template +# elasticstack_elasticsearch_component_template (Resource) Creates or updates a component template. Component templates are building blocks for constructing index templates that specify index mappings, settings, and aliases. 
See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html @@ -108,6 +109,8 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_component_template.my_template / ``` diff --git a/docs/resources/elasticsearch_data_stream.md b/docs/resources/elasticsearch_data_stream.md index 6cfb9e88d..2402ab784 100644 --- a/docs/resources/elasticsearch_data_stream.md +++ b/docs/resources/elasticsearch_data_stream.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_data_stream Resource - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_data_stream Resource" description: |- - Manages Elasticsearch Data Streams + Managing Elasticsearch data streams, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html --- -# Resource: elasticstack_elasticsearch_data_stream +# elasticstack_elasticsearch_data_stream (Resource) -Manages data streams. This resource can create, delete and show the information about the created data stream. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html +Managing Elasticsearch data streams, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html ## Example Usage @@ -123,6 +124,8 @@ Read-Only: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_data_stream.my_data_stream / ``` diff --git a/docs/resources/elasticsearch_data_stream_lifecycle.md b/docs/resources/elasticsearch_data_stream_lifecycle.md index c06eb8096..cbb14db59 100644 --- a/docs/resources/elasticsearch_data_stream_lifecycle.md +++ b/docs/resources/elasticsearch_data_stream_lifecycle.md @@ -1,12 +1,13 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_data_stream_lifecycle Resource - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_data_stream_lifecycle Resource" description: |- - Manages Lifecycle for Elasticsearch Data Streams + Configures the data stream lifecycle for the targeted data streams, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html --- -# Resource: elasticstack_elasticsearch_data_stream_lifecycle +# elasticstack_elasticsearch_data_stream_lifecycle (Resource) Configures the data stream lifecycle for the targeted data streams, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/data-stream-apis.html @@ -105,6 +106,8 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_data_stream_lifecycle.my_data_stream_lifecycle / ``` diff --git a/docs/resources/elasticsearch_enrich_policy.md b/docs/resources/elasticsearch_enrich_policy.md index 1845fc54e..138c675f0 100644 --- a/docs/resources/elasticsearch_enrich_policy.md +++ b/docs/resources/elasticsearch_enrich_policy.md @@ -1,14 +1,15 @@ + --- +# 
generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_enrich_policy Resource - terraform-provider-elasticstack" subcategory: "Enrich" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_enrich_policy" description: |- - Managing Elasticsearch enrich policies, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-apis.html + Managing Elasticsearch enrich policies. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-apis.html --- -# Resource: elasticstack_elasticsearch_enrich_policy +# elasticstack_elasticsearch_enrich_policy (Resource) -Creates or updates enrich policies, see: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-apis.html +Managing Elasticsearch enrich policies. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-apis.html ## Example Usage @@ -90,8 +91,10 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell # NOTE: while importing index resource, keep in mind, that some of the default index settings will be imported into the TF state too # You can later adjust the index configuration to account for those imported settings terraform import elasticstack_elasticsearch_enrich_policy.policy1 / -``` \ No newline at end of file +``` diff --git a/docs/resources/elasticsearch_index.md b/docs/resources/elasticsearch_index.md index da59dd302..15e5f3377 100644 --- a/docs/resources/elasticsearch_index.md +++ b/docs/resources/elasticsearch_index.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_index Resource - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_index Resource" description: |- - Creates or updates an index. + Creates Elasticsearch indices. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html --- -# Resource: elasticstack_elasticsearch_index +# elasticstack_elasticsearch_index (Resource) -Creates or updates an index. This resource can define settings, mappings and aliases. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html +Creates Elasticsearch indices. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html ## Example Usage @@ -190,13 +191,10 @@ Required: ## Import -**NOTE:** While importing index resource, keep in mind, that some of the default index settings will be imported into the TF state too. -You can later adjust the index configuration to account for those imported settings. - -Some of the default settings, which could be imported are: `index.number_of_replicas`, `index.number_of_shards` and `index.routing.allocation.include._tier_preference`. 
- Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell # NOTE: while importing index resource, keep in mind, that some of the default index settings will be imported into the TF state too # You can later adjust the index configuration to account for those imported settings diff --git a/docs/resources/elasticsearch_index_lifecycle.md b/docs/resources/elasticsearch_index_lifecycle.md index 99c241035..efbffa2ff 100644 --- a/docs/resources/elasticsearch_index_lifecycle.md +++ b/docs/resources/elasticsearch_index_lifecycle.md @@ -1,12 +1,13 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_index_lifecycle Resource - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_index_lifecycle Resource" description: |- - Creates or updates lifecycle policy. + Creates or updates lifecycle policy. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-put-lifecycle.html and https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-index-lifecycle.html --- -# Resource: elasticstack_elasticsearch_index_lifecycle +# elasticstack_elasticsearch_index_lifecycle (Resource) Creates or updates lifecycle policy. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-put-lifecycle.html and https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-index-lifecycle.html @@ -434,6 +435,8 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_index_lifecycle.my_ilm / ``` diff --git a/docs/resources/elasticsearch_index_template.md b/docs/resources/elasticsearch_index_template.md index 08715c32e..e6cc51aa6 100644 --- a/docs/resources/elasticsearch_index_template.md +++ b/docs/resources/elasticsearch_index_template.md @@ -1,12 +1,13 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_index_template Resource - terraform-provider-elasticstack" subcategory: "Index" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_index_template Resource" description: |- - Creates or updates an index template. + Creates or updates an index template. Index templates define settings, mappings, and aliases that can be applied automatically to new indices. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-put-template.html --- -# Resource: elasticstack_elasticsearch_index_template +# elasticstack_elasticsearch_index_template (Resource) Creates or updates an index template. Index templates define settings, mappings, and aliases that can be applied automatically to new indices. 
See, https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-put-template.html @@ -136,6 +137,8 @@ Required: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_index_template.my_template / ``` diff --git a/docs/resources/elasticsearch_ingest_pipeline.md b/docs/resources/elasticsearch_ingest_pipeline.md index 53f4c94dd..22d371057 100644 --- a/docs/resources/elasticsearch_ingest_pipeline.md +++ b/docs/resources/elasticsearch_ingest_pipeline.md @@ -1,24 +1,24 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_ingest_pipeline Resource - terraform-provider-elasticstack" subcategory: "Ingest" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_ingest_pipeline Resource" description: |- - Manages Ingest Pipelines + Manages tasks and resources related to ingest pipelines and processors. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-apis.html --- -# Resource: elasticstack_elasticsearch_ingest_pipeline +# elasticstack_elasticsearch_ingest_pipeline (Resource) -Use ingest APIs to manage tasks and resources related to ingest pipelines and processors. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-apis.html +Manages tasks and resources related to ingest pipelines and processors. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest-apis.html ## Example Usage -You can provide your custom JSON definitions for the ingest processors: - ```terraform provider "elasticstack" { elasticsearch {} } +// You can provide the ingest pipeline processors as plain JSON objects. resource "elasticstack_elasticsearch_ingest_pipeline" "my_ingest_pipeline" { name = "my_ingest_pipeline" description = "My first ingest pipeline managed by Terraform" @@ -43,12 +43,8 @@ EOF , ] } -``` - -Or you can use data sources and Terraform declarative way of defining the ingest processors: - -```terraform +// Or you can use the provided data sources to create the processor data sources. data "elasticstack_elasticsearch_ingest_processor_set" "set_count" { field = "count" value = 1 @@ -69,7 +65,6 @@ resource "elasticstack_elasticsearch_ingest_pipeline" "ingest" { } ``` - ## Schema @@ -113,6 +108,8 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_ingest_pipeline.my_ingest_pipeline / ``` diff --git a/docs/resources/elasticsearch_logstash_pipeline.md b/docs/resources/elasticsearch_logstash_pipeline.md index bf7f49394..e89379a50 100644 --- a/docs/resources/elasticsearch_logstash_pipeline.md +++ b/docs/resources/elasticsearch_logstash_pipeline.md @@ -1,14 +1,15 @@ + --- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_logstash_pipeline Resource - terraform-provider-elasticstack" subcategory: "Logstash" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_logstash_pipeline Resource" description: |- - Creates or updates centrally managed logstash pipelines. + Manage Logstash Pipelines via Centralized Pipeline Management. 
See, https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-apis.html --- -# Resource: elasticstack_elasticsearch_logstash_pipeline +# elasticstack_elasticsearch_logstash_pipeline (Resource) -Creates or updates centrally managed logstash pipelines. See: https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-apis.html +Manage Logstash Pipelines via Centralized Pipeline Management. See, https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-apis.html ## Example Usage @@ -112,6 +113,8 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_logstash_pipeline.example / ``` diff --git a/docs/resources/elasticsearch_script.md b/docs/resources/elasticsearch_script.md index f551360af..150f64f8e 100644 --- a/docs/resources/elasticsearch_script.md +++ b/docs/resources/elasticsearch_script.md @@ -1,12 +1,13 @@ + --- -subcategory: "Cluster" -layout: "" -page_title: "Elasticstack: elasticstack_elasticsearch_script Resource" +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "elasticstack_elasticsearch_script Resource - terraform-provider-elasticstack" +subcategory: "Elasticsearch" description: |- - Creates or updates a stored script or search template. + Creates or updates a stored script or search template. See https://www.elastic.co/guide/en/elasticsearch/reference/current/create-stored-script-api.html --- -# Resource: elasticstack_elasticsearch_script +# elasticstack_elasticsearch_script (Resource) Creates or updates a stored script or search template. See https://www.elastic.co/guide/en/elasticsearch/reference/current/create-stored-script-api.html @@ -85,6 +86,8 @@ Optional: Import is supported using the following syntax: +The [`terraform import` command](https://developer.hashicorp.com/terraform/cli/commands/import) can be used, for example: + ```shell terraform import elasticstack_elasticsearch_script.my_script /