From 97bc4d6b19b057107b3c7c8506a7b3b92ca761ea Mon Sep 17 00:00:00 2001 From: Elastic Machine Date: Mon, 29 Jul 2024 06:03:36 +0000 Subject: [PATCH] Auto-generated API code --- docs/reference.asciidoc | 950 ++++++++++++++++++------ src/api/api/async_search.ts | 43 +- src/api/api/bulk.ts | 11 +- src/api/api/cat.ts | 98 ++- src/api/api/clear_scroll.ts | 11 +- src/api/api/close_point_in_time.ts | 8 +- src/api/api/cluster.ts | 51 +- src/api/api/count.ts | 9 +- src/api/api/create.ts | 12 +- src/api/api/delete.ts | 12 +- src/api/api/delete_by_query.ts | 11 +- src/api/api/delete_script.ts | 11 +- src/api/api/enrich.ts | 46 +- src/api/api/eql.ts | 41 +- src/api/api/esql.ts | 10 +- src/api/api/exists.ts | 12 +- src/api/api/exists_source.ts | 12 +- src/api/api/explain.ts | 12 +- src/api/api/field_caps.ts | 13 +- src/api/api/get.ts | 12 +- src/api/api/get_script.ts | 11 +- src/api/api/get_source.ts | 12 +- src/api/api/graph.ts | 11 +- src/api/api/index.ts | 12 +- src/api/api/indices.ts | 360 +++++++-- src/api/api/inference.ts | 41 +- src/api/api/info.ts | 8 +- src/api/api/ingest.ts | 48 +- src/api/api/license.ts | 8 +- src/api/api/logstash.ts | 31 +- src/api/api/mget.ts | 9 +- src/api/api/ml.ts | 509 ++++++++++--- src/api/api/msearch.ts | 9 +- src/api/api/msearch_template.ts | 11 +- src/api/api/mtermvectors.ts | 9 +- src/api/api/open_point_in_time.ts | 11 +- src/api/api/ping.ts | 8 +- src/api/api/put_script.ts | 12 +- src/api/api/query_rules.ts | 295 ++++++++ src/api/api/query_ruleset.ts | 162 ---- src/api/api/rank_eval.ts | 11 +- src/api/api/reindex.ts | 8 +- src/api/api/render_search_template.ts | 11 +- src/api/api/scripts_painless_execute.ts | 8 +- src/api/api/scroll.ts | 9 +- src/api/api/search.ts | 13 +- src/api/api/search_application.ts | 66 +- src/api/api/search_mvt.ts | 15 +- src/api/api/search_template.ts | 11 +- src/api/api/security.ts | 58 +- src/api/api/sql.ts | 40 +- src/api/api/synonyms.ts | 59 +- src/api/api/tasks.ts | 11 +- src/api/api/terms_enum.ts | 9 +- 
src/api/api/termvectors.ts | 12 +- src/api/api/transform.ts | 101 ++- src/api/api/update.ts | 12 +- src/api/api/update_by_query.ts | 11 +- src/api/index.ts | 14 +- src/api/types.ts | 697 ++++++++++++----- src/api/typesWithBodyKey.ts | 712 +++++++++++++----- 61 files changed, 3683 insertions(+), 1167 deletions(-) create mode 100644 src/api/api/query_rules.ts delete mode 100644 src/api/api/query_ruleset.ts diff --git a/docs/reference.asciidoc b/docs/reference.asciidoc index 87ccc66..72f5286 100644 --- a/docs/reference.asciidoc +++ b/docs/reference.asciidoc @@ -27,7 +27,9 @@ [discrete] === bulk -Allows to perform multiple index/update/delete operations in a single request. +Bulk index or delete documents. +Performs multiple indexing or delete operations in a single API call. +This reduces overhead and can greatly increase indexing speed. {ref}/docs-bulk.html[Endpoint documentation] [source,ts] @@ -56,7 +58,7 @@ Set to all or any positive integer up to the total number of shards in the index [discrete] === clear_scroll -Explicitly clears the search context for a scroll. +Clears the search context and results for a scrolling search. {ref}/clear-scroll-api.html[Endpoint documentation] [source,ts] @@ -72,7 +74,7 @@ To clear all scroll IDs, use `_all`. [discrete] === close_point_in_time -Close a point in time +Closes a point-in-time. {ref}/point-in-time-api.html[Endpoint documentation] [source,ts] @@ -101,7 +103,7 @@ client.count({ ... }) ** *`index` (Optional, string | string[])*: List of data streams, indices, and aliases to search. Supports wildcards (`*`). To search all data streams and indices, omit this parameter or use `*` or `_all`. 
-** *`query` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Defines the search definition using the Query DSL. +** *`query` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Defines the search definition using the Query DSL. ** *`allow_no_indices` (Optional, boolean)*: If `false`, the request returns an error if any wildcard expression, index alias, or `_all` value targets only missing or closed indices. This behavior applies even if the request targets other open indices. ** *`analyzer` (Optional, string)*: Analyzer to use for the query string. @@ -129,9 +131,9 @@ Elasticsearch collects documents before sorting. [discrete] === create -Creates a new document in the index. 
- -Returns a 409 response when a document with a same ID already exists in the index. +Index a document. +Adds a JSON document to the specified data stream or index and makes it searchable. +If the target is an index and the document already exists, the request updates the document and increments its version. {ref}/docs-index_.html[Endpoint documentation] [source,ts] @@ -162,7 +164,8 @@ Set to `all` or any positive integer up to the total number of shards in the ind [discrete] === delete -Removes a document from the index. +Delete a document. +Removes a JSON document from the specified index. {ref}/docs-delete.html[Endpoint documentation] [source,ts] @@ -189,7 +192,8 @@ Set to `all` or any positive integer up to the total number of shards in the ind [discrete] === delete_by_query -Deletes documents matching the provided query. +Delete documents. +Deletes documents that match the specified query. {ref}/docs-delete-by-query.html[Endpoint documentation] [source,ts] @@ -204,7 +208,7 @@ client.deleteByQuery({ index }) Supports wildcards (`*`). To search all data streams or indices, omit this parameter or use `*` or `_all`. ** *`max_docs` (Optional, number)*: The maximum number of documents to delete. -** *`query` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Specifies the documents to delete using the Query DSL. 
+** *`query` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Specifies the documents to delete using the Query DSL. ** *`slice` (Optional, { field, id, max })*: Slice the request manually using the provided slice ID and total number of slices. ** *`allow_no_indices` (Optional, boolean)*: If `false`, the request returns an error if any wildcard expression, index alias, or `_all` value targets only missing or closed indices. This behavior applies even if the request targets other open indices. @@ -252,7 +256,8 @@ Set to all or any positive integer up to the total number of shards in the index [discrete] === delete_script -Deletes a script. +Delete a script or search template. +Deletes a stored script or search template. {ref}/modules-scripting.html[Endpoint documentation] [source,ts] @@ -271,7 +276,8 @@ If no response is received before the timeout expires, the request fails and ret [discrete] === exists -Returns information about whether a document exists in an index. +Check a document. +Checks if a specified document exists. {ref}/docs-get.html[Endpoint documentation] [source,ts] @@ -302,7 +308,8 @@ The specified version must match the current version of the document for the req [discrete] === exists_source -Returns information about whether a document source exists in an index. +Check for a document source. 
+Checks if a document's `_source` is stored. {ref}/docs-get.html[Endpoint documentation] [source,ts] @@ -330,7 +337,8 @@ The specified version must match the current version of the document for the req [discrete] === explain -Returns information about why a specific matches (or doesn't match) a query. +Explain a document match result. +Returns information about why a specific document matches, or doesn’t match, a query. {ref}/search-explain.html[Endpoint documentation] [source,ts] @@ -344,7 +352,7 @@ client.explain({ id, index }) ** *`id` (string)*: Defines the document ID. ** *`index` (string)*: Index names used to limit the request. Only a single index name can be provided to this parameter. -** *`query` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Defines the search definition using the Query DSL. 
+** *`query` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Defines the search definition using the Query DSL. ** *`analyzer` (Optional, string)*: Analyzer to use for the query string. This parameter can only be used when the `q` query string parameter is specified. ** *`analyze_wildcard` (Optional, boolean)*: If `true`, wildcard and prefix queries are analyzed. @@ -362,7 +370,9 @@ Random by default. [discrete] === field_caps -Returns the information about the capabilities of fields among multiple indices. +The field capabilities API returns the information about the capabilities of fields among multiple indices. +The field capabilities API returns runtime fields like any other field. For example, a runtime field with a type +of keyword is returned as any other field that belongs to the `keyword` family. {ref}/search-field-caps.html[Endpoint documentation] [source,ts] @@ -374,18 +384,24 @@ client.fieldCaps({ ... }) * *Request (object):* ** *`index` (Optional, string | string[])*: List of data streams, indices, and aliases used to limit the request. Supports wildcards (*). To target all data streams and indices, omit this parameter or use * or _all. 
-** *`index_filter` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Allows to filter indices if the provided query rewrites to match_none on every shard. +** *`fields` (Optional, string | string[])*: List of fields to retrieve capabilities for. Wildcard (`*`) expressions are supported. +** *`index_filter` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Allows to filter indices if the provided query rewrites to match_none on every shard. +** *`runtime_mappings` (Optional, Record)*: Defines ad-hoc runtime fields in the request similar to the way it is done in search requests. +These fields exist only as part of the query and take precedence over fields defined with the same name in the index mappings. 
** *`allow_no_indices` (Optional, boolean)*: If false, the request returns an error if any wildcard expression, index alias, or `_all` value targets only missing or closed indices. This behavior applies even if the request targets other open indices. For example, a request targeting `foo*,bar*` returns an error if an index starts with foo but no index starts with bar. ** *`expand_wildcards` (Optional, Enum("all" | "open" | "closed" | "hidden" | "none") | Enum("all" | "open" | "closed" | "hidden" | "none")[])*: Type of index that wildcard patterns can match. If the request can target data streams, this argument determines whether wildcard expressions match hidden data streams. Supports a list of values, such as `open,hidden`. -** *`fields` (Optional, string | string[])*: List of fields to retrieve capabilities for. Wildcard (`*`) expressions are supported. ** *`ignore_unavailable` (Optional, boolean)*: If `true`, missing or closed indices are not included in the response. ** *`include_unmapped` (Optional, boolean)*: If true, unmapped fields are included in the response. +** *`filters` (Optional, string)*: An optional set of filters: can include +metadata,-metadata,-nested,-multifield,-parent +** *`types` (Optional, string[])*: Only return results for fields that have one of the types in the list +** *`include_empty_fields` (Optional, boolean)*: If false, empty fields are not included in the response. [discrete] === get -Returns a document. +Get a document by its ID. +Retrieves the document with the specified ID from an index. {ref}/docs-get.html[Endpoint documentation] [source,ts] @@ -413,7 +429,8 @@ If this field is specified, the `_source` parameter defaults to false. [discrete] === get_script -Returns a script. +Get a script or search template. +Retrieves a stored script or search template. {ref}/modules-scripting.html[Endpoint documentation] [source,ts] @@ -429,6 +446,7 @@ client.getScript({ id }) [discrete] === get_source +Get a document's source. 
Returns the source of a document. {ref}/docs-get.html[Endpoint documentation] @@ -455,7 +473,9 @@ client.getSource({ id, index }) [discrete] === index -Creates or updates a document in an index. +Index a document. +Adds a JSON document to the specified data stream or index and makes it searchable. +If the target is an index and the document already exists, the request updates the document and increments its version. {ref}/docs-index_.html[Endpoint documentation] [source,ts] @@ -493,6 +513,7 @@ Set to all or any positive integer up to the total number of shards in the index [discrete] === info +Get cluster info. Returns basic information about the cluster. {ref}/index.html[Endpoint documentation] @@ -543,7 +564,7 @@ client.msearch({ ... }) * *Request (object):* ** *`index` (Optional, string | string[])*: List of data streams, indices, and index aliases to search. -** *`searches` (Optional, { allow_no_indices, expand_wildcards, ignore_unavailable, index, preference, request_cache, routing, search_type, ccs_minimize_roundtrips, allow_partial_search_results, ignore_throttled } | { aggregations, collapse, query, explain, ext, stored_fields, docvalue_fields, from, highlight, indices_boost, min_score, post_filter, profile, rescore, script_fields, search_after, size, sort, _source, fields, terminate_after, stats, timeout, track_scores, track_total_hits, version, runtime_mappings, seq_no_primary_term, pit, suggest }[])* +** *`searches` (Optional, { allow_no_indices, expand_wildcards, ignore_unavailable, index, preference, request_cache, routing, search_type, ccs_minimize_roundtrips, allow_partial_search_results, ignore_throttled } | { aggregations, collapse, query, explain, ext, stored_fields, docvalue_fields, knn, from, highlight, indices_boost, min_score, post_filter, profile, rescore, script_fields, search_after, size, sort, _source, fields, terminate_after, stats, timeout, track_scores, track_total_hits, version, runtime_mappings, seq_no_primary_term, pit, suggest }[])* 
** *`allow_no_indices` (Optional, boolean)*: If false, the request returns an error if any wildcard expression, index alias, or _all value targets only missing or closed indices. This behavior applies even if the request targets other open indices. For example, a request targeting foo*,bar* returns an error if an index starts with foo but no index starts with bar. ** *`ccs_minimize_roundtrips` (Optional, boolean)*: If true, network roundtrips between the coordinating node and remote clusters are minimized for cross-cluster search requests. ** *`expand_wildcards` (Optional, Enum("all" | "open" | "closed" | "hidden" | "none") | Enum("all" | "open" | "closed" | "hidden" | "none")[])*: Type of index that wildcard expressions can match. If the request can target data streams, this argument determines whether wildcard expressions match hidden data streams. @@ -559,7 +580,7 @@ client.msearch({ ... }) [discrete] === msearch_template -Allows to execute several search template operations in one request. +Runs multiple templated searches with a single request. {ref}/search-multi-search.html[Endpoint documentation] [source,ts] @@ -573,7 +594,7 @@ client.msearchTemplate({ ... }) ** *`index` (Optional, string | string[])*: List of data streams, indices, and aliases to search. Supports wildcards (`*`). To search all data streams and indices, omit this parameter or use `*`. 
-** *`search_templates` (Optional, { allow_no_indices, expand_wildcards, ignore_unavailable, index, preference, request_cache, routing, search_type, ccs_minimize_roundtrips, allow_partial_search_results, ignore_throttled } | { aggregations, collapse, query, explain, ext, stored_fields, docvalue_fields, from, highlight, indices_boost, min_score, post_filter, profile, rescore, script_fields, search_after, size, sort, _source, fields, terminate_after, stats, timeout, track_scores, track_total_hits, version, runtime_mappings, seq_no_primary_term, pit, suggest }[])* +** *`search_templates` (Optional, { allow_no_indices, expand_wildcards, ignore_unavailable, index, preference, request_cache, routing, search_type, ccs_minimize_roundtrips, allow_partial_search_results, ignore_throttled } | { aggregations, collapse, query, explain, ext, stored_fields, docvalue_fields, knn, from, highlight, indices_boost, min_score, post_filter, profile, rescore, script_fields, search_after, size, sort, _source, fields, terminate_after, stats, timeout, track_scores, track_total_hits, version, runtime_mappings, seq_no_primary_term, pit, suggest }[])* ** *`ccs_minimize_roundtrips` (Optional, boolean)*: If `true`, network round-trips are minimized for cross-cluster search requests. ** *`max_concurrent_searches` (Optional, number)*: Maximum number of concurrent searches the API can run. ** *`search_type` (Optional, Enum("query_then_fetch" | "dfs_query_then_fetch"))*: The type of the search operation. @@ -614,7 +635,12 @@ Random by default. [discrete] === open_point_in_time -Open a point in time that can be used in subsequent searches +A search request by default executes against the most recent visible data of the target indices, +which is called point in time. Elasticsearch pit (point in time) is a lightweight view into the +state of the data as it existed when initiated. In some cases, it’s preferred to perform multiple +search requests using the same point in time. 
For example, if refreshes happen between +`search_after` requests, then the results of those requests might not be consistent as changes happening +between searches are only visible to the more recent point in time. {ref}/point-in-time-api.html[Endpoint documentation] [source,ts] @@ -637,6 +663,7 @@ Supports a list of values, such as `open,hidden`. Valid values are: `all`, `open [discrete] === ping +Ping the cluster. Returns whether the cluster is running. {ref}/index.html[Endpoint documentation] @@ -647,7 +674,8 @@ client.ping() [discrete] === put_script -Creates or updates a script. +Create or update a script or search template. +Creates or updates a stored script or search template. {ref}/modules-scripting.html[Endpoint documentation] [source,ts] @@ -670,7 +698,7 @@ If no response is received before the timeout expires, the request fails and ret [discrete] === rank_eval -Allows to evaluate the quality of ranked search results over a set of typical search queries +Enables you to evaluate the quality of ranked search results over a set of typical search queries. {ref}/search-rank-eval.html[Endpoint documentation] [source,ts] @@ -692,9 +720,8 @@ To target all data streams and indices in a cluster, omit this parameter or use [discrete] === reindex -Allows to copy documents from one index to another, optionally filtering the source -documents by a query, changing the destination index settings, or fetching the -documents from a remote cluster. +Reindex documents. +Copies documents from a source to a destination. The source can be any existing index, alias, or data stream. The destination must differ from the source. For example, you cannot reindex a data stream into itself. {ref}/docs-reindex.html[Endpoint documentation] [source,ts] @@ -709,7 +736,7 @@ client.reindex({ dest, source }) ** *`source` ({ index, query, remote, size, slice, sort, _source, runtime_mappings })*: The source you are copying from. 
** *`conflicts` (Optional, Enum("abort" | "proceed"))*: Set to proceed to continue reindexing even if there are conflicts. ** *`max_docs` (Optional, number)*: The maximum number of documents to reindex. -** *`script` (Optional, { lang, options, source } | { id })*: The script to run to update the document source or metadata when reindexing. +** *`script` (Optional, { source, id, params, lang, options })*: The script to run to update the document source or metadata when reindexing. ** *`size` (Optional, number)* ** *`refresh` (Optional, boolean)*: If `true`, the request refreshes affected shards to make this operation visible to search. ** *`requests_per_second` (Optional, float)*: The throttle for this request in sub-requests per second. @@ -725,7 +752,7 @@ Set to `all` or any positive integer up to the total number of shards in the ind [discrete] === render_search_template -Allows to use the Mustache language to pre-render a search definition. +Renders a search template as a search request body. {ref}/render-search-template-api.html[Endpoint documentation] [source,ts] @@ -749,7 +776,8 @@ If no `id` or `` is specified, this parameter is required. [discrete] === scripts_painless_execute -Allows an arbitrary script to be executed and a result to be returned +Run a script. +Runs a script and returns a result. {painless}/painless-execute-api.html[Endpoint documentation] [source,ts] @@ -762,7 +790,7 @@ client.scriptsPainlessExecute({ ... }) * *Request (object):* ** *`context` (Optional, string)*: The context that the script should run in. ** *`context_setup` (Optional, { document, index, query })*: Additional parameters for the `context`. -** *`script` (Optional, { lang, options, source })*: The Painless script to execute. +** *`script` (Optional, { source, id, params, lang, options })*: The Painless script to execute. [discrete] === scroll @@ -783,7 +811,9 @@ client.scroll({ scroll_id }) [discrete] === search -Returns results matching a query. 
+Returns search hits that match the query defined in the request. +You can provide search queries using the `q` query string parameter or the request body. +If both are specified, only the query parameter is used. {ref}/search-search.html[Endpoint documentation] [source,ts] @@ -797,7 +827,7 @@ client.search({ ... }) ** *`index` (Optional, string | string[])*: List of data streams, indices, and aliases to search. Supports wildcards (`*`). To search all data streams and indices, omit this parameter or use `*` or `_all`. -** *`aggregations` (Optional, Record)*: Defines the aggregations that are run as part of the search request. +** *`aggregations` (Optional, Record)*: Defines the aggregations that are run as part of the search request. ** *`collapse` (Optional, { field, inner_hits, max_concurrent_group_searches, collapse })*: Collapses search results the values of the specified field. ** *`explain` (Optional, boolean)*: If true, returns detailed information about score computation as part of a hit. ** *`ext` (Optional, Record)*: Configuration of search extensions defined by Elasticsearch plugins. @@ -812,15 +842,18 @@ If `false`, the response does not include the total number of hits matching the ** *`indices_boost` (Optional, Record[])*: Boosts the _score of documents from specified indices. ** *`docvalue_fields` (Optional, { field, format, include_unmapped }[])*: Array of wildcard (`*`) patterns. The request returns doc values for field names matching these patterns in the `hits.fields` property of the response. +** *`knn` (Optional, { field, query_vector, query_vector_builder, k, num_candidates, boost, filter, similarity, inner_hits } | { field, query_vector, query_vector_builder, k, num_candidates, boost, filter, similarity, inner_hits }[])*: Defines the approximate kNN search to run. +** *`rank` (Optional, { rrf })*: Defines the Reciprocal Rank Fusion (RRF) to use. ** *`min_score` (Optional, number)*: Minimum `_score` for matching documents. 
Documents with a lower `_score` are not included in the search results. -** *`post_filter` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Use the `post_filter` parameter to filter search results. +** *`post_filter` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Use the `post_filter` parameter to filter search results. The search hits are filtered after the aggregations are calculated. A post filter has no impact on the aggregation results. ** *`profile` (Optional, boolean)*: Set to `true` to return detailed timing information about the execution of individual components in a search request. NOTE: This is a debugging tool and adds significant overhead to search execution. 
-** *`query` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Defines the search definition using the Query DSL. +** *`query` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Defines the search definition using the Query DSL. ** *`rescore` (Optional, { window_size, query, learning_to_rank } | { window_size, query, learning_to_rank }[])*: Can be used to improve precision by reordering just the top (for example 100 - 500) documents returned by the `query` and `post_filter` phases. +** *`retriever` (Optional, { standard, knn, rrf })*: A retriever is a specification to describe top documents returned from a search. A retriever replaces other elements of the search API that also return top documents such as query and knn. 
** *`script_fields` (Optional, Record)*: Retrieve a script evaluation (based on different fields) for each hit. ** *`search_after` (Optional, number | number | string | boolean | null | User-defined value[])*: Used to retrieve the next page of hits using a set of sort values from the previous page. ** *`size` (Optional, number)*: The number of hits to return. @@ -926,7 +959,8 @@ Query parameter searches do not support the full Elasticsearch Query DSL but are [discrete] === search_mvt -Searches a vector tile for geospatial values. Returns results as a binary Mapbox vector tile. +Search a vector tile. +Searches a vector tile for geospatial values. {ref}/search-vector-tile-api.html[Endpoint documentation] [source,ts] @@ -942,7 +976,7 @@ client.searchMvt({ index, field, zoom, x, y }) ** *`zoom` (number)*: Zoom level for the vector tile to search ** *`x` (number)*: X coordinate for the vector tile to search ** *`y` (number)*: Y coordinate for the vector tile to search -** *`aggs` (Optional, Record)*: Sub-aggregations for the geotile_grid. +** *`aggs` (Optional, Record)*: Sub-aggregations for the geotile_grid. Supports the following aggregation types: - avg @@ -969,7 +1003,7 @@ don’t include the aggs layer. each feature represents a geotile_grid cell. If 'grid' each feature is a Polygon of the cells bounding box. If 'point' each feature is a Point that is the centroid of the cell. 
-** *`query` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Query DSL used to filter documents for the search. +** *`query` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Query DSL used to filter documents for the search. ** *`runtime_mappings` (Optional, Record)*: Defines one or more runtime fields in the search request. These fields take precedence over mapped fields with the same name. ** *`size` (Optional, number)*: Maximum number of features to return in the hits layer. Accepts 0-10000. @@ -985,7 +1019,7 @@ suggested label positions for the original features. [discrete] === search_template -Allows to use the Mustache language to pre-render a search definition. +Runs a search with a search template. 
{ref}/search-template.html[Endpoint documentation] [source,ts] @@ -1024,6 +1058,7 @@ Random by default. ** *`scroll` (Optional, string | -1 | 0)*: Specifies how long a consistent view of the index should be maintained for scrolled search. ** *`search_type` (Optional, Enum("query_then_fetch" | "dfs_query_then_fetch"))*: The type of the search operation. +** *`rest_total_hits_as_int` (Optional, boolean)*: If true, hits.total are rendered as an integer in the response. ** *`typed_keys` (Optional, boolean)*: If `true`, the response prefixes aggregation and suggester names with their respective types. [discrete] @@ -1044,12 +1079,13 @@ client.termsEnum({ index, field }) ** *`size` (Optional, number)*: How many matching terms to return. ** *`timeout` (Optional, string | -1 | 0)*: The maximum length of time to spend collecting results. Defaults to "1s" (one second). If the timeout is exceeded the complete flag set to false in the response and the results may be partial or empty. ** *`case_insensitive` (Optional, boolean)*: When true the provided search string is matched against index terms without case sensitivity. -** *`index_filter` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Allows to filter an index shard if the provided query rewrites to match_none. 
+** *`index_filter` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Allows to filter an index shard if the provided query rewrites to match_none. ** *`string` (Optional, string)*: The string after which terms in the index should be returned. Allows for a form of pagination if the last result from one request is passed as the search_after parameter for a subsequent request. ** *`search_after` (Optional, string)* [discrete] === termvectors +Get term vector information. Returns information and statistics about terms in the fields of a particular document. {ref}/docs-termvectors.html[Endpoint documentation] @@ -1082,7 +1118,8 @@ Random by default. [discrete] === update -Updates a document with a script or partial document. +Update a document. +Updates a document by running a script or passing a partial document. {ref}/docs-update.html[Endpoint documentation] [source,ts] @@ -1099,7 +1136,7 @@ client.update({ id, index }) to 'noop' if no change to the document occurred. ** *`doc` (Optional, object)*: A partial update to an existing document. ** *`doc_as_upsert` (Optional, boolean)*: Set to true to use the contents of 'doc' as the value of 'upsert' -** *`script` (Optional, { lang, options, source } | { id })*: Script to execute to update the document. 
+** *`script` (Optional, { source, id, params, lang, options })*: Script to execute to update the document. ** *`scripted_upsert` (Optional, boolean)*: Set to true to execute the script whether or not the document exists. ** *`_source` (Optional, boolean | { excludes, includes })*: Set to false to disable source retrieval. You can also specify a comma-separated list of the fields you want to retrieve. @@ -1125,9 +1162,9 @@ Set to 'all' or any positive integer up to the total number of shards in the ind [discrete] === update_by_query -Updates documents that match the specified query. If no query is specified, - performs an update on every document in the index without changing the source, -for example to pick up a mapping change. +Update documents. +Updates documents that match the specified query. +If no query is specified, performs an update on every document in the data stream or index without modifying the source, which is useful for picking up mapping changes. {ref}/docs-update-by-query.html[Endpoint documentation] [source,ts] @@ -1142,8 +1179,8 @@ client.updateByQuery({ index }) Supports wildcards (`*`). To search all data streams or indices, omit this parameter or use `*` or `_all`. ** *`max_docs` (Optional, number)*: The maximum number of documents to update. -** *`query` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Specifies the documents to update using the Query DSL. 
-** *`script` (Optional, { lang, options, source } | { id })*: The script to run to update the document source or metadata when updating. +** *`query` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Specifies the documents to update using the Query DSL. +** *`script` (Optional, { source, id, params, lang, options })*: The script to run to update the document source or metadata when updating. ** *`slice` (Optional, { field, id, max })*: Slice the request manually using the provided slice ID and total number of slices. ** *`conflicts` (Optional, Enum("abort" | "proceed"))*: What to do if update by query hits version conflicts: `abort` or `proceed`. ** *`allow_no_indices` (Optional, boolean)*: If `false`, the request returns an error if any wildcard expression, index alias, or `_all` value targets only missing or closed indices. @@ -1194,7 +1231,10 @@ Set to `all` or any positive integer up to the total number of shards in the ind === async_search [discrete] ==== delete -Deletes an async search by ID. If the search is still running, the search request will be cancelled. Otherwise, the saved search results are deleted. +Deletes an async search by identifier. +If the search is still running, the search request will be cancelled. +Otherwise, the saved search results are deleted. 
+If the Elasticsearch security features are enabled, the deletion of a specific async search is restricted to: the authenticated user that submitted the original search request; users that have the `cancel_task` cluster privilege. {ref}/async-search.html[Endpoint documentation] [source,ts] @@ -1210,7 +1250,8 @@ client.asyncSearch.delete({ id }) [discrete] ==== get -Retrieves the results of a previously submitted async search request given its ID. +Retrieves the results of a previously submitted async search request given its identifier. +If the Elasticsearch security features are enabled, access to the results of a specific async search is restricted to the user or API key that submitted it. {ref}/async-search.html[Endpoint documentation] [source,ts] @@ -1235,7 +1276,9 @@ By default no timeout is set meaning that the currently available results will b [discrete] ==== status -Retrieves the status of a previously submitted async search request given its ID. +Get async search status +Retrieves the status of a previously submitted async search request given its identifier, without retrieving search results. +If the Elasticsearch security features are enabled, use of this API is restricted to the `monitoring_user` role. {ref}/async-search.html[Endpoint documentation] [source,ts] @@ -1251,7 +1294,11 @@ client.asyncSearch.status({ id }) [discrete] ==== submit -Executes a search request asynchronously. +Runs a search request asynchronously. +When the primary sort of the results is an indexed field, shards get sorted based on minimum and maximum value that they hold for that field, hence partial results become available following the sort criteria that was requested. +Warning: Async search does not support scroll nor search requests that only include the suggest section. +By default, Elasticsearch doesn’t allow you to store an async search response larger than 10Mb and an attempt to do this results in an error. 
+The maximum allowed size for a stored async search response can be set by changing the `search.max_async_search_response_size` cluster level setting. {ref}/async-search.html[Endpoint documentation] [source,ts] @@ -1264,7 +1311,7 @@ client.asyncSearch.submit({ ... }) * *Request (object):* ** *`index` (Optional, string | string[])*: A list of index names to search; use `_all` or empty string to perform the operation on all indices -** *`aggregations` (Optional, Record)* +** *`aggregations` (Optional, Record)* ** *`collapse` (Optional, { field, inner_hits, max_concurrent_group_searches, collapse })* ** *`explain` (Optional, boolean)*: If true, returns detailed information about score computation as part of a hit. ** *`ext` (Optional, Record)*: Configuration of search extensions defined by Elasticsearch plugins. @@ -1279,11 +1326,12 @@ Defaults to 10,000 hits. ** *`indices_boost` (Optional, Record[])*: Boosts the _score of documents from specified indices. ** *`docvalue_fields` (Optional, { field, format, include_unmapped }[])*: Array of wildcard (*) patterns. The request returns doc values for field names matching these patterns in the hits.fields property of the response. +** *`knn` (Optional, { field, query_vector, query_vector_builder, k, num_candidates, boost, filter, similarity, inner_hits } | { field, query_vector, query_vector_builder, k, num_candidates, boost, filter, similarity, inner_hits }[])*: Defines the approximate kNN search to run. ** *`min_score` (Optional, number)*: Minimum _score for matching documents. Documents with a lower _score are not included in the search results. 
-** *`post_filter` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })* +** *`post_filter` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })* ** *`profile` (Optional, boolean)* -** *`query` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, 
span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Defines the search definition using the Query DSL. +** *`query` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Defines the search definition using the Query DSL. ** *`rescore` (Optional, { window_size, query, learning_to_rank } | { window_size, query, learning_to_rank }[])* ** *`script_fields` (Optional, Record)*: Retrieve a script evaluation (based on different fields) for each hit. ** *`search_after` (Optional, number | number | string | boolean | null | User-defined value[])* @@ -1358,7 +1406,11 @@ A partial reduction is performed every time the coordinating node has received a === cat [discrete] ==== aliases -Shows information about currently configured aliases to indices including filter and routing infos. +Get aliases. +Retrieves the cluster’s index aliases, including filter and routing information. +The API does not return data stream aliases. +> info +> CAT APIs are only intended for human consumption using the command line or the Kibana console. They are not intended for use by applications. For application consumption, use [the /_alias endpoints](#endpoint-alias). {ref}/cat-alias.html[Endpoint documentation] [source,ts] @@ -1375,7 +1427,12 @@ client.cat.aliases({ ... 
}) [discrete] ==== component_templates -Returns information about existing component_templates templates. +Get component templates. +Returns information about component templates in a cluster. +Component templates are building blocks for constructing index templates that specify index mappings, settings, and aliases. +> info +> CAT APIs are only intended for human consumption using the command line or Kibana console. +They are not intended for use by applications. For application consumption, use [the /_component_template endpoints](#endpoint-component-template). {ref}/cat-component-templates.html[Endpoint documentation] [source,ts] ---- client.cat.componentTemplates({ ... }) [discrete] ==== count -Provides quick access to the document count of the entire cluster, or individual indices. +Get a document count. +Provides quick access to a document count for a data stream, an index, or an entire cluster. +The document count only includes live documents, not deleted documents which have not yet been removed by the merge process. +> info +> CAT APIs are only intended for human consumption using the command line or Kibana console. +They are not intended for use by applications. For application consumption, use [the /_count endpoints](#endpoint-count). {ref}/cat-count.html[Endpoint documentation] [source,ts] ---- client.cat.count({ ... }) ---- [discrete] ==== Arguments * *Request (object):* ** *`index` (Optional, string | string[])*: List of data streams, indices, and aliases used to limit the request. Supports wildcards (`*`). To target all data streams and indices, omit this parameter or use `*` or `_all`. [discrete] ==== help -Returns help for the Cat APIs. +Get CAT help. +Returns help for the CAT APIs. {ref}/cat.html[Endpoint documentation] [source,ts] ---- client.cat.help() ---- [discrete] ==== indices -Returns information about indices: number of primaries and replicas, document counts, disk size, ... +Get index information. +Returns high-level information about indices in a cluster, including backing indices for data streams. +> info +> CAT APIs are only intended for human consumption using the command line or Kibana console.
+They are not intended for use by applications. For application consumption, use an index endpoint. + +Use this request to get the following information for each index in a cluster: +- shard count +- document count +- deleted document count +- primary store size +- total store size of all shards, including shard replicas + +These metrics are retrieved directly from Lucene, which Elasticsearch uses internally to power indexing and search. As a result, all document counts include hidden nested documents. +To get an accurate count of Elasticsearch documents, use the [/_cat/count](#operation-cat-count) or [count](#endpoint-count) endpoints. {ref}/cat-indices.html[Endpoint documentation] [source,ts] @@ -1442,7 +1519,13 @@ Supports wildcards (`*`). To target all data streams and indices, omit this para [discrete] ==== ml_data_frame_analytics -Gets configuration and usage information about data frame analytics jobs. +Get data frame analytics jobs. +Returns configuration and usage information about data frame analytics jobs. + +> info +> CAT APIs are only intended for human consumption using the Kibana +console or command line. They are not intended for use by applications. For +application consumption, use [the /_ml/data_frame/analytics endpoints](#endpoint-ml). {ref}/cat-dfanalytics.html[Endpoint documentation] [source,ts] @@ -1464,7 +1547,16 @@ response. [discrete] ==== ml_datafeeds -Gets configuration and usage information about datafeeds. +Get datafeeds. +Returns configuration and usage information about datafeeds. +This API returns a maximum of 10,000 datafeeds. +If the Elasticsearch security features are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` +cluster privileges to use this API. + +> info +> CAT APIs are only intended for human consumption using the Kibana +console or command line. They are not intended for use by applications. For +application consumption, use [the /_ml/datafeeds endpoints](#endpoint-ml). 
{ref}/cat-datafeeds.html[Endpoint documentation] [source,ts] @@ -1492,7 +1584,16 @@ partial matches. [discrete] ==== ml_jobs -Gets configuration and usage information about anomaly detection jobs. +Get anomaly detection jobs. +Returns configuration and usage information for anomaly detection jobs. +This API returns a maximum of 10,000 jobs. +If the Elasticsearch security features are enabled, you must have `monitor_ml`, +`monitor`, `manage_ml`, or `manage` cluster privileges to use this API. + +> info +> CAT APIs are only intended for human consumption using the Kibana +console or command line. They are not intended for use by applications. For +application consumption, use [the /_ml/anomaly_detectors endpoints](#endpoint-ml). {ref}/cat-anomaly-detectors.html[Endpoint documentation] [source,ts] @@ -1521,7 +1622,13 @@ matches. [discrete] ==== ml_trained_models -Gets configuration and usage information about inference trained models. +Get trained models. +Returns configuration and usage information about inference trained models. + +> info +> CAT APIs are only intended for human consumption using the Kibana +console or command line. They are not intended for use by applications. For +application consumption, use [the /_ml/trained_models endpoints](#endpoint-ml). {ref}/cat-trained-model.html[Endpoint documentation] [source,ts] @@ -1545,7 +1652,13 @@ If `false`, the API returns a 404 status code when there are no matches or only [discrete] ==== transforms -Gets configuration and usage information about transforms. +Get transforms. +Returns configuration and usage information about transforms. + +> info +> CAT APIs are only intended for human consumption using the Kibana +console or command line. They are not intended for use by applications. For +application consumption, use [the /_transform endpoints](#endpoint-transform). 
{ref}/cat-transforms.html[Endpoint documentation] [source,ts] @@ -1572,7 +1685,9 @@ If `false`, the request returns a 404 status code when there are no matches or o === cluster [discrete] ==== delete_component_template -Deletes a component template +Delete component templates. +Deletes component templates. +Component templates are building blocks for constructing index templates that specify index mappings, settings, and aliases. {ref}/indices-component-template.html[Endpoint documentation] [source,ts] @@ -1592,7 +1707,8 @@ If no response is received before the timeout expires, the request fails and ret [discrete] ==== exists_component_template -Returns information about whether a particular component template exist +Check component templates. +Returns information about whether a particular component template exists. {ref}/indices-component-template.html[Endpoint documentation] [source,ts] @@ -1614,7 +1730,8 @@ Defaults to false, which means information is retrieved from the master node. [discrete] ==== get_component_template -Returns one or more component templates +Get component templates. +Retrieves information about component templates. {ref}/indices-component-template.html[Endpoint documentation] [source,ts] @@ -1629,6 +1746,7 @@ client.cluster.getComponentTemplate({ ... }) ** *`name` (Optional, string)*: List of component template names used to limit the request. Wildcard (`*`) expressions are supported. ** *`flat_settings` (Optional, boolean)*: If `true`, returns settings in flat format. +** *`include_defaults` (Optional, boolean)*: Return all default configurations for the component template (default: false) ** *`local` (Optional, boolean)*: If `true`, the request retrieves information from the local node only. If `false`, information is retrieved from the master node. ** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node. 
@@ -1636,7 +1754,8 @@ If no response is received before the timeout expires, the request fails and ret [discrete] ==== info -Returns different information about the cluster. +Get cluster info. +Returns basic information about the cluster. {ref}/cluster-info.html[Endpoint documentation] [source,ts] @@ -1652,7 +1771,22 @@ client.cluster.info({ target }) [discrete] ==== put_component_template -Creates or updates a component template +Create or update a component template. +Creates or updates a component template. +Component templates are building blocks for constructing index templates that specify index mappings, settings, and aliases. + +An index template can be composed of multiple component templates. +To use a component template, specify it in an index template’s `composed_of` list. +Component templates are only applied to new data streams and indices as part of a matching index template. + +Settings and mappings specified directly in the index template or the create index request override any settings or mappings specified in a component template. + +Component templates are only used during index creation. +For data streams, this includes data stream creation and the creation of a stream’s backing indices. +Changes to component templates do not affect existing indices, including a stream’s backing indices. + +You can use C-style `/* *\/` block comments in component templates. +You can include comments anywhere in the request body except before the opening curly bracket. {ref}/indices-component-template.html[Endpoint documentation] [source,ts] @@ -1669,7 +1803,7 @@ Elasticsearch includes the following built-in component templates: `logs-mapping Elastic Agent uses these templates to configure backing indices for its data streams. If you use Elastic Agent and want to overwrite one of these templates, set the `version` for your replacement template higher than the current version. 
If you don’t use Elastic Agent and want to disable all built-in component and index templates, set `stack.templates.enabled` to `false` using the cluster update settings API. -** *`template` ({ aliases, mappings, settings, defaults, data_stream })*: The template to be applied which includes mappings, settings, or aliases configuration. +** *`template` ({ aliases, mappings, settings, defaults, data_stream, lifecycle })*: The template to be applied which includes mappings, settings, or aliases configuration. ** *`version` (Optional, number)*: Version number used to manage component templates externally. This number isn't automatically generated or incremented by Elasticsearch. To unset a version, replace the template without specifying a version. @@ -1687,6 +1821,7 @@ If no response is received before the timeout expires, the request fails and ret === enrich [discrete] ==== delete_policy +Delete an enrich policy. Deletes an existing enrich policy and its enrich index. {ref}/delete-enrich-policy-api.html[Endpoint documentation] @@ -1720,7 +1855,8 @@ client.enrich.executePolicy({ name }) [discrete] ==== get_policy -Gets information about an enrich policy. +Get an enrich policy. +Returns information about an enrich policy. {ref}/get-enrich-policy-api.html[Endpoint documentation] [source,ts] @@ -1737,7 +1873,8 @@ To return information for all enrich policies, omit this parameter. [discrete] ==== put_policy -Creates a new enrich policy. +Create an enrich policy. +Creates an enrich policy. {ref}/put-enrich-policy-api.html[Endpoint documentation] [source,ts] @@ -1756,7 +1893,8 @@ client.enrich.putPolicy({ name }) [discrete] ==== stats -Gets enrich coordinator statistics and information about enrich policies that are currently executing. +Get enrich stats. +Returns enrich coordinator statistics and information about enrich policies that are currently executing. 
{ref}/enrich-stats-api.html[Endpoint documentation] [source,ts] @@ -1769,7 +1907,8 @@ client.enrich.stats() === eql [discrete] ==== delete -Deletes an async EQL search by ID. If the search is still running, the search request will be cancelled. Otherwise, the saved search results are deleted. +Deletes an async EQL search or a stored synchronous EQL search. +The API also deletes results for the search. {ref}/eql-search-api.html[Endpoint documentation] [source,ts] @@ -1787,7 +1926,7 @@ A search ID is also provided if the request’s `keep_on_completion` parameter i [discrete] ==== get -Returns async results from previously executed Event Query Language (EQL) search +Returns the current status and available results for an async EQL search or a stored synchronous EQL search. {ref}/get-async-eql-search-api.html[Endpoint documentation] [source,ts] @@ -1807,7 +1946,7 @@ Defaults to no timeout, meaning the request waits for complete search results. [discrete] ==== get_status -Returns the status of a previously submitted async or stored Event Query Language (EQL) search +Returns the current status for an async EQL search or a stored synchronous EQL search without returning results. {ref}/get-async-eql-status-api.html[Endpoint documentation] [source,ts] @@ -1842,13 +1981,14 @@ client.eql.search({ index, query }) ** *`tiebreaker_field` (Optional, string)*: Field used to sort hits with the same timestamp in ascending order ** *`timestamp_field` (Optional, string)*: Field containing event timestamp. Default "@timestamp" ** *`fetch_size` (Optional, number)*: Maximum number of events to search at a time for sequence queries. 
-** *`filter` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type } | { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type }[])*: Query, written in Query DSL, used to filter the events on which the EQL query runs. 
+** *`filter` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type } | { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type }[])*: Query, written in Query DSL, used to filter the events on which the EQL query runs. ** *`keep_alive` (Optional, string | -1 | 0)* ** *`keep_on_completion` (Optional, boolean)* ** *`wait_for_completion_timeout` (Optional, string | -1 | 0)* ** *`size` (Optional, number)*: For basic queries, the maximum number of matching events to return. Defaults to 10 ** *`fields` (Optional, { field, format, include_unmapped } | { field, format, include_unmapped }[])*: Array of wildcard (*) patterns. 
The response returns values for field names matching these patterns in the fields property of each hit. ** *`result_position` (Optional, Enum("tail" | "head"))* +** *`runtime_mappings` (Optional, Record)* ** *`allow_no_indices` (Optional, boolean)* ** *`expand_wildcards` (Optional, Enum("all" | "open" | "closed" | "hidden" | "none") | Enum("all" | "open" | "closed" | "hidden" | "none")[])* ** *`ignore_unavailable` (Optional, boolean)*: If true, missing or closed indices are not included in the response. @@ -1857,7 +1997,7 @@ client.eql.search({ index, query }) === esql [discrete] ==== query -Executes an ESQL request +Executes an ES|QL request {ref}/esql-rest.html[Endpoint documentation] [source,ts] @@ -1871,17 +2011,25 @@ client.esql.query({ query }) * *Request (object):* ** *`query` (string)*: The ES|QL query API accepts an ES|QL query string in the query parameter, runs it, and returns the results. ** *`columnar` (Optional, boolean)*: By default, ES|QL returns results as rows. For example, FROM returns each individual document as one row. For the JSON, YAML, CBOR and smile formats, ES|QL can return the results in a columnar fashion where one row represents all the values of a certain column in the results. -** *`filter` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Specify a Query DSL query in the filter parameter to filter the set of documents that an ES|QL query runs on. 
+** *`filter` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Specify a Query DSL query in the filter parameter to filter the set of documents that an ES|QL query runs on. ** *`locale` (Optional, string)* -** *`params` (Optional, number | number | string | boolean | null[])*: To avoid any attempts of hacking or code injection, extract the values in a separate list of parameters. Use question mark placeholders (?) in the query string for each of the parameters. +** *`params` (Optional, number | number | string | boolean | null | User-defined value[])*: To avoid any attempts of hacking or code injection, extract the values in a separate list of parameters. Use question mark placeholders (?) in the query string for each of the parameters. +** *`profile` (Optional, boolean)*: If provided and `true` the response will include an extra `profile` object +with information on how the query was executed. This information is for human debugging +and its format can change at any time but it can give some insight into the performance +of each part of the query. +** *`tables` (Optional, Record>)*: Tables to use with the LOOKUP operation. The top level key is the table +name and the next level key is the column name. ** *`format` (Optional, string)*: A short version of the Accept header, e.g. json, yaml. 
** *`delimiter` (Optional, string)*: The character to use between values within a CSV row. Only valid for the CSV format. +** *`drop_null_columns` (Optional, boolean)*: Should columns that are entirely `null` be removed from the `columns` and `values` portion of the results? +Defaults to `false`. If `true` then the response will include an extra section under the name `all_columns` which has the name of all columns. [discrete] === graph [discrete] ==== explore -Explore extracted and summarized information about the documents and terms in an index. +Extracts and summarizes information about the documents and terms in an Elasticsearch data stream or index. {ref}/graph-explore-api.html[Endpoint documentation] [source,ts] @@ -1896,7 +2044,7 @@ client.graph.explore({ index }) ** *`index` (string | string[])*: Name of the index. ** *`connections` (Optional, { connections, query, vertices })*: Specifies or more fields from which you want to extract terms that are associated with the specified vertices. ** *`controls` (Optional, { sample_diversity, sample_size, timeout, use_significance })*: Direct the Graph API how to build the graph. -** *`query` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: A seed query that identifies the documents of interest. Can be any valid Elasticsearch query. 
+** *`query` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: A seed query that identifies the documents of interest. Can be any valid Elasticsearch query. ** *`vertices` (Optional, { exclude, field, include, min_doc_count, shard_min_doc_count, size }[])*: Specifies one or more fields that contain the terms you want to include in the graph as vertices. ** *`routing` (Optional, string)*: Custom value used to route operations to a specific shard. ** *`timeout` (Optional, string | -1 | 0)*: Specifies the period of time to wait for a response from each shard. @@ -1907,7 +2055,8 @@ Defaults to no timeout. === indices [discrete] ==== add_block -Adds a block to an index. +Add an index block. +Limits the operations allowed on an index by blocking specific operation types. {ref}/index-modules-blocks.html[Endpoint documentation] [source,ts] @@ -1929,7 +2078,7 @@ client.indices.addBlock({ index, block }) [discrete] ==== analyze -Performs the analysis process on a text and return the tokens breakdown of the text. +Performs analysis on a text string and returns the resulting tokens. {ref}/indices-analyze.html[Endpoint documentation] [source,ts] @@ -1960,7 +2109,8 @@ If an array of strings is provided, it is analyzed as a multi-value field. [discrete] ==== create -Creates an index with optional settings and mappings. 
+Create an index. +Creates a new index. {ref}/indices-create-index.html[Endpoint documentation] [source,ts] @@ -1974,7 +2124,7 @@ client.indices.create({ index }) * *Request (object):* ** *`index` (string)*: Name of the index you wish to create. ** *`aliases` (Optional, Record)*: Aliases for the index. -** *`mappings` (Optional, { all_field, date_detection, dynamic, dynamic_date_formats, dynamic_templates, _field_names, index_field, _meta, numeric_detection, properties, _routing, _size, _source, runtime, enabled, subobjects })*: Mapping for fields in the index. If specified, this mapping can include: +** *`mappings` (Optional, { all_field, date_detection, dynamic, dynamic_date_formats, dynamic_templates, _field_names, index_field, _meta, numeric_detection, properties, _routing, _size, _source, runtime, enabled, subobjects, _data_stream_timestamp })*: Mapping for fields in the index. If specified, this mapping can include: - Field names - Field data types - Mapping parameters @@ -1988,7 +2138,9 @@ Set to `all` or any positive integer up to the total number of shards in the ind [discrete] ==== create_data_stream -Creates a data stream +Create a data stream. +Creates a data stream. +You must have a matching index template with data stream enabled. {ref}/data-streams.html[Endpoint documentation] [source,ts] @@ -2009,7 +2161,8 @@ Cannot be longer than 255 bytes. Multi-byte characters count towards this limit [discrete] ==== data_streams_stats -Provides statistics on operations happening in a data stream. +Get data stream stats. +Retrieves statistics for one or more data streams. {ref}/data-streams.html[Endpoint documentation] [source,ts] @@ -2029,7 +2182,8 @@ Supports a list of values, such as `open,hidden`. [discrete] ==== delete -Deletes an index. +Delete indices. +Deletes one or more indices. 
{ref}/indices-delete-index.html[Endpoint documentation] [source,ts] @@ -2059,7 +2213,8 @@ If no response is received before the timeout expires, the request fails and ret [discrete] ==== delete_alias -Deletes an alias. +Delete an alias. +Removes a data stream or index from an alias. {ref}/indices-aliases.html[Endpoint documentation] [source,ts] @@ -2082,7 +2237,8 @@ If no response is received before the timeout expires, the request fails and ret [discrete] ==== delete_data_lifecycle -Deletes the data stream lifecycle of the selected data streams. +Delete data stream lifecycles. +Removes the data stream lifecycle from a data stream, rendering it not managed by the data stream lifecycle. {ref}/data-streams-delete-lifecycle.html[Endpoint documentation] [source,ts] @@ -2101,7 +2257,8 @@ client.indices.deleteDataLifecycle({ name }) [discrete] ==== delete_data_stream -Deletes a data stream. +Delete data streams. +Deletes one or more data streams and their backing indices. {ref}/data-streams.html[Endpoint documentation] [source,ts] @@ -2118,7 +2275,10 @@ client.indices.deleteDataStream({ name }) [discrete] ==== delete_index_template -Deletes an index template. +Delete an index template. +The provided may contain multiple template names separated by a comma. If multiple template +names are specified then there is no wildcard support and the provided names should match completely with +existing templates. {ref}/indices-delete-template.html[Endpoint documentation] [source,ts] @@ -2136,7 +2296,8 @@ client.indices.deleteIndexTemplate({ name }) [discrete] ==== exists -Returns information about whether a particular index exists. +Check indices. +Checks if one or more indices, index aliases, or data streams exist. {ref}/indices-exists.html[Endpoint documentation] [source,ts] @@ -2162,7 +2323,8 @@ Valid values are: `all`, `open`, `closed`, `hidden`, `none`. [discrete] ==== exists_alias -Returns information about whether a particular alias exists. +Check aliases. 
+Checks if one or more data stream or index aliases exist. {ref}/indices-aliases.html[Endpoint documentation] [source,ts] @@ -2205,7 +2367,8 @@ client.indices.existsIndexTemplate({ name }) [discrete] ==== explain_data_lifecycle -Retrieves information about the index's current data stream lifecycle, such as any potential encountered error, time since creation etc. +Get the status for a data stream lifecycle. +Retrieves information about an index or data stream’s current data stream lifecycle status, such as time since index creation, time since rollover, the lifecycle configuration managing the index, or any errors encountered during lifecycle execution. {ref}/data-streams-explain-lifecycle.html[Endpoint documentation] [source,ts] @@ -2223,7 +2386,9 @@ client.indices.explainDataLifecycle({ index }) [discrete] ==== get -Returns information about one or more indices. +Get index information. +Returns information about one or more indices. For data streams, the API returns information about the +stream’s backing indices. {ref}/indices-get-index.html[Endpoint documentation] [source,ts] @@ -2248,10 +2413,12 @@ such as open,hidden. ** *`include_defaults` (Optional, boolean)*: If true, return all default settings in the response. ** *`local` (Optional, boolean)*: If true, the request retrieves information from the local node only. Defaults to false, which means information is retrieved from the master node. ** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. +** *`features` (Optional, { name, description } | { name, description }[])*: Return only information on specified index features [discrete] ==== get_alias -Returns an alias. +Get aliases. +Retrieves information for one or more data stream or index aliases. 
{ref}/indices-aliases.html[Endpoint documentation] [source,ts] @@ -2280,7 +2447,8 @@ Valid values are: `all`, `open`, `closed`, `hidden`, `none`. [discrete] ==== get_data_lifecycle -Returns the data stream lifecycle of the selected data streams. +Get data stream lifecycles. +Retrieves the data stream lifecycle configuration of one or more data streams. {ref}/data-streams-get-lifecycle.html[Endpoint documentation] [source,ts] @@ -2302,7 +2470,8 @@ Valid values are: `all`, `open`, `closed`, `hidden`, `none`. [discrete] ==== get_data_stream -Returns data streams. +Get data streams. +Retrieves information about one or more data streams. {ref}/data-streams.html[Endpoint documentation] [source,ts] @@ -2318,10 +2487,12 @@ client.indices.getDataStream({ ... }) Wildcard (`*`) expressions are supported. If omitted, all data streams are returned. ** *`expand_wildcards` (Optional, Enum("all" | "open" | "closed" | "hidden" | "none") | Enum("all" | "open" | "closed" | "hidden" | "none")[])*: Type of data stream that wildcard patterns can match. Supports a list of values, such as `open,hidden`. +** *`include_defaults` (Optional, boolean)*: If true, returns all relevant default configurations for the index template. [discrete] ==== get_index_template -Returns an index template. +Get index templates. +Returns information about one or more index templates. {ref}/indices-get-template.html[Endpoint documentation] [source,ts] @@ -2337,10 +2508,13 @@ client.indices.getIndexTemplate({ ... }) ** *`local` (Optional, boolean)*: If true, the request retrieves information from the local node only. Defaults to false, which means information is retrieved from the master node. ** *`flat_settings` (Optional, boolean)*: If true, returns settings in flat format. ** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. 
+** *`include_defaults` (Optional, boolean)*: If true, returns all relevant default configurations for the index template. [discrete] ==== get_mapping -Returns mappings for one or more indices. +Get mapping definitions. +Retrieves mapping definitions for one or more indices. +For data streams, the API retrieves mappings for the stream’s backing indices. {ref}/indices-get-mapping.html[Endpoint documentation] [source,ts] @@ -2368,7 +2542,9 @@ If no response is received before the timeout expires, the request fails and ret [discrete] ==== get_settings -Returns settings for one or more indices. +Get index settings. +Returns setting information for one or more indices. For data streams, +returns setting information for the stream’s backing indices. {ref}/indices-get-settings.html[Endpoint documentation] [source,ts] @@ -2403,7 +2579,17 @@ error. [discrete] ==== migrate_to_data_stream -Migrates an alias to a data stream +Convert an index alias to a data stream. +Converts an index alias to a data stream. +You must have a matching index template that is data stream enabled. +The alias must meet the following criteria: +The alias must have a write index; +All indices for the alias must have a `@timestamp` field mapping of a `date` or `date_nanos` field type; +The alias must not have any filters; +The alias must not use custom routing. +If successful, the request removes the alias and creates a data stream with the same name. +The indices for the alias become hidden backing indices for the stream. +The write index for the alias becomes the write index for the stream. {ref}/data-streams.html[Endpoint documentation] [source,ts] @@ -2419,7 +2605,8 @@ client.indices.migrateToDataStream({ name }) [discrete] ==== modify_data_stream -Modifies a data stream +Update data streams. +Performs one or more data stream modification actions in a single atomic operation. 
{ref}/data-streams.html[Endpoint documentation] [source,ts] @@ -2435,7 +2622,8 @@ client.indices.modifyDataStream({ actions }) [discrete] ==== put_alias -Creates or updates an alias. +Create or update an alias. +Adds a data stream or index to an alias. {ref}/indices-aliases.html[Endpoint documentation] [source,ts] @@ -2453,7 +2641,7 @@ Wildcard patterns that match both data streams and indices return an error. ** *`name` (string)*: Alias to update. If the alias doesn’t exist, the request creates it. Index alias names support date math. -** *`filter` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Query used to limit documents the alias can access. 
+** *`filter` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Query used to limit documents the alias can access. ** *`index_routing` (Optional, string)*: Value used to route indexing operations to a specific shard. If specified, this overwrites the `routing` value for indexing operations. Data stream aliases don’t support this parameter. @@ -2473,7 +2661,8 @@ If no response is received before the timeout expires, the request fails and ret [discrete] ==== put_data_lifecycle -Updates the data stream lifecycle of the selected data streams. +Update data stream lifecycles. +Update the data stream lifecycle of the specified data streams. {ref}/data-streams-put-lifecycle.html[Endpoint documentation] [source,ts] @@ -2504,7 +2693,8 @@ If no response is received before the timeout expires, the request fails and ret [discrete] ==== put_index_template -Creates or updates an index template. +Create or update an index template. +Index templates define settings, mappings, and aliases that can be applied automatically to new indices. {ref}/indices-put-template.html[Endpoint documentation] [source,ts] @@ -2520,7 +2710,7 @@ client.indices.putIndexTemplate({ name }) ** *`index_patterns` (Optional, string | string[])*: Name of the index template to create. 
** *`composed_of` (Optional, string[])*: An ordered list of component template names. Component templates are merged in the order specified, meaning that the last component template specified has the highest precedence. -** *`template` (Optional, { aliases, mappings, settings })*: Template to be applied. +** *`template` (Optional, { aliases, mappings, settings, lifecycle })*: Template to be applied. It may optionally include an `aliases`, `mappings`, or `settings` configuration. ** *`data_stream` (Optional, { hidden })*: If this object is included, the template is used to create data streams and their backing indices. Supports an empty object. @@ -2548,7 +2738,10 @@ If no response is received before the timeout expires, the request fails and ret [discrete] ==== put_mapping -Updates the index mappings. +Update field mappings. +Adds new fields to an existing data stream or index. +You can also use this API to change the search settings of existing fields. +For data streams, these changes are applied to all backing indices by default. {ref}/indices-put-mapping.html[Endpoint documentation] [source,ts] @@ -2572,7 +2765,7 @@ a new date field is added instead of string. not used at all by Elasticsearch, but can be used to store application-specific metadata. ** *`numeric_detection` (Optional, boolean)*: Automatically map strings into numeric data types for all fields. -** *`properties` (Optional, Record)*: Mapping for a field. For new fields, this mapping can include: +** *`properties` (Optional, Record)*: Mapping for a field. For new fields, this mapping can include: - Field name - Field data type @@ -2595,7 +2788,9 @@ If no response is received before the timeout expires, the request fails and ret [discrete] ==== put_settings -Updates the index settings. +Update index settings. +Changes dynamic index settings in real time. For data streams, index setting +changes are applied to all backing indices by default. 
{ref}/indices-update-settings.html[Endpoint documentation] [source,ts] @@ -2631,7 +2826,8 @@ error. [discrete] ==== put_template -Creates or updates an index template. +Create or update an index template. +Index templates define settings, mappings, and aliases that can be applied automatically to new indices. {ref}/indices-templates-v1.html[Endpoint documentation] [source,ts] @@ -2647,7 +2843,7 @@ client.indices.putTemplate({ name }) ** *`aliases` (Optional, Record)*: Aliases for the index. ** *`index_patterns` (Optional, string | string[])*: Array of wildcard expressions used to match the names of indices during creation. -** *`mappings` (Optional, { all_field, date_detection, dynamic, dynamic_date_formats, dynamic_templates, _field_names, index_field, _meta, numeric_detection, properties, _routing, _size, _source, runtime, enabled, subobjects })*: Mapping for fields in the index. +** *`mappings` (Optional, { all_field, date_detection, dynamic, dynamic_date_formats, dynamic_templates, _field_names, index_field, _meta, numeric_detection, properties, _routing, _size, _source, runtime, enabled, subobjects, _data_stream_timestamp })*: Mapping for fields in the index. ** *`order` (Optional, number)*: Order in which Elasticsearch applies this template if index matches multiple templates. @@ -2663,7 +2859,9 @@ received before the timeout expires, the request fails and returns an error. [discrete] ==== refresh -Performs the refresh operation in one or more indices. +Refresh an index. +A refresh makes recent operations performed on one or more indices available for search. +For data streams, the API runs the refresh operation on the stream’s backing indices. {ref}/indices-refresh.html[Endpoint documentation] [source,ts] @@ -2688,7 +2886,8 @@ Valid values are: `all`, `open`, `closed`, `hidden`, `none`. 
[discrete] ==== resolve_index -Returns information about any matching indices, aliases, and data streams +Resolves the specified name(s) and/or index patterns for indices, aliases, and data streams. +Multiple patterns and remote clusters are supported. {ref}/indices-resolve-index-api.html[Endpoint documentation] [source,ts] @@ -2709,8 +2908,8 @@ Valid values are: `all`, `open`, `closed`, `hidden`, `none`. [discrete] ==== rollover -Updates an alias to point to a new index when the existing index -is considered to be too large or too old. +Roll over to a new index. +Creates a new index for a data stream or index alias. {ref}/indices-rollover-index.html[Endpoint documentation] [source,ts] @@ -2733,7 +2932,7 @@ If specified, Elasticsearch only performs the rollover if the current index sati If this parameter is not specified, Elasticsearch performs the rollover unconditionally. If conditions are specified, at least one of them must be a `max_*` condition. The index will rollover if any `max_*` condition is satisfied and all `min_*` conditions are satisfied. -** *`mappings` (Optional, { all_field, date_detection, dynamic, dynamic_date_formats, dynamic_templates, _field_names, index_field, _meta, numeric_detection, properties, _routing, _size, _source, runtime, enabled, subobjects })*: Mapping for fields in the index. +** *`mappings` (Optional, { all_field, date_detection, dynamic, dynamic_date_formats, dynamic_templates, _field_names, index_field, _meta, numeric_detection, properties, _routing, _size, _source, runtime, enabled, subobjects, _data_stream_timestamp })*: Mapping for fields in the index. If specified, this mapping can include field names, field data types, and mapping paramaters. ** *`settings` (Optional, Record)*: Configuration options for the index. Data streams do not support this parameter. 
@@ -2747,7 +2946,8 @@ Set to all or any positive integer up to the total number of shards in the index [discrete] ==== simulate_index_template -Simulate matching the given index name against the index templates in the system +Simulate an index. +Returns the index configuration that would be applied to the specified index from an existing index template. {ref}/indices-simulate-index.html[Endpoint documentation] [source,ts] @@ -2761,10 +2961,12 @@ client.indices.simulateIndexTemplate({ name }) * *Request (object):* ** *`name` (string)*: Name of the index to simulate ** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. +** *`include_defaults` (Optional, boolean)*: If true, returns all relevant default configurations for the index template. [discrete] ==== simulate_template -Simulate resolving the given template name or body +Simulate an index template. +Returns the index configuration that would be applied by a particular index template. {ref}/indices-simulate-template.html[Endpoint documentation] [source,ts] @@ -2784,7 +2986,7 @@ If set to `false`, then indices or data streams matching the template must alway ** *`index_patterns` (Optional, string | string[])*: Array of wildcard (`*`) expressions used to match the names of data streams and indices during creation. ** *`composed_of` (Optional, string[])*: An ordered list of component template names. Component templates are merged in the order specified, meaning that the last component template specified has the highest precedence. -** *`template` (Optional, { aliases, mappings, settings })*: Template to be applied. +** *`template` (Optional, { aliases, mappings, settings, lifecycle })*: Template to be applied. It may optionally include an `aliases`, `mappings`, or `settings` configuration. 
** *`data_stream` (Optional, { hidden })*: If this object is included, the template is used to create data streams and their backing indices. Supports an empty object. @@ -2804,10 +3006,12 @@ references a component template that might not exist that uses deprecated components, Elasticsearch will emit a deprecation warning. ** *`create` (Optional, boolean)*: If true, the template passed in the body is only used if no existing templates match the same index patterns. If false, the simulation uses the template with the highest priority. Note that the template is not permanently added or updated in either case; it is only used for the simulation. ** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. +** *`include_defaults` (Optional, boolean)*: If true, returns all relevant default configurations for the index template. [discrete] ==== update_aliases -Updates index aliases. +Create or update an alias. +Adds a data stream or index to an alias. {ref}/indices-aliases.html[Endpoint documentation] [source,ts] @@ -2827,7 +3031,8 @@ If no response is received before the timeout expires, the request fails and ret [discrete] ==== validate_query -Allows a user to validate a potentially expensive query without executing it. +Validate a query. +Validates a query without running it. {ref}/search-validate.html[Endpoint documentation] [source,ts] @@ -2842,7 +3047,7 @@ client.indices.validateQuery({ ... }) ** *`index` (Optional, string | string[])*: List of data streams, indices, and aliases to search. Supports wildcards (`*`). To search all data streams or indices, omit this parameter or use `*` or `_all`. 
-** *`query` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Query in the Lucene query string syntax. +** *`query` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Query in the Lucene query string syntax. ** *`allow_no_indices` (Optional, boolean)*: If `false`, the request returns an error if any wildcard expression, index alias, or `_all` value targets only missing or closed indices. This behavior applies even if the request targets other open indices. ** *`all_shards` (Optional, boolean)*: If `true`, the validation is executed on all shards instead of one random shard per index. @@ -2902,7 +3107,7 @@ client.inference.get({ ... 
}) [discrete] ==== inference -Perform inference +Perform inference on the service {ref}/post-inference-api.html[Endpoint documentation] [source,ts] @@ -2925,7 +3130,7 @@ Not required for other tasks. [discrete] ==== put -Configure an inference endpoint for use in the Inference API +Create an inference endpoint {ref}/put-inference-api.html[Endpoint documentation] [source,ts] @@ -2945,7 +3150,7 @@ client.inference.put({ inference_id }) === ingest [discrete] ==== delete_pipeline -Deletes a pipeline. +Deletes one or more existing ingest pipeline. {ref}/delete-pipeline-api.html[Endpoint documentation] [source,ts] @@ -2966,7 +3171,8 @@ If no response is received before the timeout expires, the request fails and ret [discrete] ==== get_pipeline -Returns a pipeline. +Returns information about one or more ingest pipelines. +This API returns a local reference of the pipeline. {ref}/get-pipeline-api.html[Endpoint documentation] [source,ts] @@ -2987,7 +3193,9 @@ If no response is received before the timeout expires, the request fails and ret [discrete] ==== processor_grok -Returns a list of the built-in patterns. +Extracts structured fields out of a single text field within a document. +You choose which field to extract matched fields from, as well as the grok pattern you expect will match. +A grok pattern is like a regular expression that supports aliased expressions that can be reused. {ref}/grok-processor.html[Endpoint documentation] [source,ts] @@ -2998,7 +3206,8 @@ client.ingest.processorGrok() [discrete] ==== put_pipeline -Creates or updates a pipeline. +Creates or updates an ingest pipeline. +Changes made using this API take effect immediately. {ref}/ingest.html[Endpoint documentation] [source,ts] @@ -3013,8 +3222,8 @@ client.ingest.putPipeline({ id }) ** *`id` (string)*: ID of the ingest pipeline to create or update. ** *`_meta` (Optional, Record)*: Optional metadata about the ingest pipeline. May have any contents. 
This map is not automatically generated by Elasticsearch. ** *`description` (Optional, string)*: Description of the ingest pipeline. -** *`on_failure` (Optional, { attachment, append, csv, convert, date, date_index_name, dot_expander, enrich, fail, foreach, json, user_agent, kv, geoip, grok, gsub, join, lowercase, remove, rename, reroute, script, set, sort, split, trim, uppercase, urldecode, bytes, dissect, set_security_user, pipeline, drop, circle, inference }[])*: Processors to run immediately after a processor failure. Each processor supports a processor-level `on_failure` value. If a processor without an `on_failure` value fails, Elasticsearch uses this pipeline-level parameter as a fallback. The processors in this parameter run sequentially in the order specified. Elasticsearch will not attempt to run the pipeline's remaining processors. -** *`processors` (Optional, { attachment, append, csv, convert, date, date_index_name, dot_expander, enrich, fail, foreach, json, user_agent, kv, geoip, grok, gsub, join, lowercase, remove, rename, reroute, script, set, sort, split, trim, uppercase, urldecode, bytes, dissect, set_security_user, pipeline, drop, circle, inference }[])*: Processors used to perform transformations on documents before indexing. Processors run sequentially in the order specified. +** *`on_failure` (Optional, { append, attachment, bytes, circle, convert, csv, date, date_index_name, dissect, dot_expander, drop, enrich, fail, foreach, geoip, grok, gsub, inference, join, json, kv, lowercase, pipeline, remove, rename, reroute, script, set, set_security_user, sort, split, trim, uppercase, urldecode, user_agent }[])*: Processors to run immediately after a processor failure. Each processor supports a processor-level `on_failure` value. If a processor without an `on_failure` value fails, Elasticsearch uses this pipeline-level parameter as a fallback. The processors in this parameter run sequentially in the order specified. 
Elasticsearch will not attempt to run the pipeline's remaining processors. +** *`processors` (Optional, { append, attachment, bytes, circle, convert, csv, date, date_index_name, dissect, dot_expander, drop, enrich, fail, foreach, geoip, grok, gsub, inference, join, json, kv, lowercase, pipeline, remove, rename, reroute, script, set, set_security_user, sort, split, trim, uppercase, urldecode, user_agent }[])*: Processors used to perform transformations on documents before indexing. Processors run sequentially in the order specified. ** *`version` (Optional, number)*: Version number used by external systems to track ingest pipelines. This parameter is intended for external systems only. Elasticsearch does not use or validate pipeline version numbers. ** *`master_timeout` (Optional, string | -1 | 0)*: Period to wait for a connection to the master node. If no response is received before the timeout expires, the request fails and returns an error. ** *`timeout` (Optional, string | -1 | 0)*: Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error. @@ -3022,7 +3231,7 @@ client.ingest.putPipeline({ id }) [discrete] ==== simulate -Allows to simulate a pipeline with example documents. +Executes an ingest pipeline against a set of provided documents. {ref}/simulate-pipeline-api.html[Endpoint documentation] [source,ts] @@ -3046,7 +3255,9 @@ If you specify both this and the request path parameter, the API only uses the r === license [discrete] ==== get -Retrieves licensing information for the cluster +Get license information. +Returns information about your Elastic license, including its type, its status, when it was issued, and when it expires. +For more information about the different types of licenses, refer to [Elastic Stack subscriptions](https://www.elastic.co/subscriptions). 
{ref}/get-license.html[Endpoint documentation] [source,ts] @@ -3066,7 +3277,7 @@ This parameter is deprecated and will always be set to true in 8.x. === logstash [discrete] ==== delete_pipeline -Deletes Logstash Pipelines used by Central Management +Deletes a pipeline used for Logstash Central Management. {ref}/logstash-api-delete-pipeline.html[Endpoint documentation] [source,ts] @@ -3082,7 +3293,7 @@ client.logstash.deletePipeline({ id }) [discrete] ==== get_pipeline -Retrieves Logstash Pipelines used by Central Management +Retrieves pipelines used for Logstash Central Management. {ref}/logstash-api-get-pipeline.html[Endpoint documentation] [source,ts] @@ -3098,7 +3309,7 @@ client.logstash.getPipeline({ ... }) [discrete] ==== put_pipeline -Adds and updates Logstash Pipelines used for Central Management +Creates or updates a pipeline used for Logstash Central Management. {ref}/logstash-api-put-pipeline.html[Endpoint documentation] [source,ts] @@ -3117,7 +3328,11 @@ client.logstash.putPipeline({ id }) === ml [discrete] ==== close_job -Closes one or more anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle. +Close anomaly detection jobs. +A job can be opened and closed multiple times throughout its lifecycle. A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. +When you close a job, it runs housekeeping tasks such as pruning the model history, flushing buffers, calculating final results and persisting the model snapshots. Depending upon the size of the job, it could take several minutes to close and the equivalent time to re-open. After it is closed, the job has a minimal overhead on the cluster except for maintaining its meta data. Therefore it is a best practice to close jobs that are no longer required to process data. +If you close an anomaly detection job whose datafeed is running, the request first tries to stop the datafeed. 
This behavior is equivalent to calling stop datafeed API with the same timeout and force parameters as the close job request. +When a datafeed that has a specified end date stops, it automatically closes its associated job. {ref}/ml-close-job.html[Endpoint documentation] [source,ts] @@ -3136,7 +3351,7 @@ client.ml.closeJob({ job_id }) [discrete] ==== delete_calendar -Deletes a calendar. +Removes all scheduled events from a calendar, then deletes it. {ref}/ml-delete-calendar.html[Endpoint documentation] [source,ts] @@ -3188,7 +3403,7 @@ list of jobs or groups. [discrete] ==== delete_data_frame_analytics -Deletes an existing data frame analytics job. +Deletes a data frame analytics job. {ref}/delete-dfanalytics.html[Endpoint documentation] [source,ts] @@ -3228,6 +3443,8 @@ stopping and deleting the datafeed. [discrete] ==== delete_filter Deletes a filter. +If an anomaly detection job references the filter, you cannot delete the +filter. You must update or delete the job before you can delete the filter. {ref}/ml-delete-filter.html[Endpoint documentation] [source,ts] @@ -3243,7 +3460,13 @@ client.ml.deleteFilter({ filter_id }) [discrete] ==== delete_job -Deletes an existing anomaly detection job. +Delete an anomaly detection job. +All job configuration, model state and results are deleted. +It is not currently possible to delete multiple jobs using wildcards or a +comma separated list. If you delete a job that has a datafeed, the request +first tries to delete the datafeed. This behavior is equivalent to calling +the delete datafeed API with the same timeout and force parameters as the +delete job request. {ref}/ml-delete-job.html[Endpoint documentation] [source,ts] @@ -3266,7 +3489,8 @@ job deletion completes. [discrete] ==== delete_trained_model -Deletes an existing trained inference model that is currently not referenced by an ingest pipeline. +Deletes an existing trained inference model that is currently not referenced +by an ingest pipeline. 
{ref}/delete-trained-models.html[Endpoint documentation] [source,ts] @@ -3283,7 +3507,10 @@ client.ml.deleteTrainedModel({ model_id }) [discrete] ==== delete_trained_model_alias -Deletes a model alias that refers to the trained model +Deletes a trained model alias. +This API deletes an existing model alias that refers to a trained model. If +the model alias is missing or refers to a model other than the one identified +by the `model_id`, this API returns an error. {ref}/delete-trained-models-aliases.html[Endpoint documentation] [source,ts] @@ -3300,7 +3527,9 @@ client.ml.deleteTrainedModelAlias({ model_alias, model_id }) [discrete] ==== estimate_model_memory -Estimates the model memory +Makes an estimation of the memory usage for an anomaly detection job model. +It is based on analysis configuration details for the job and cardinality +estimates for the fields it references. {ref}/ml-apis.html[Endpoint documentation] [source,ts] @@ -3330,6 +3559,10 @@ omitted from the request if no detectors have a `by_field_name`, [discrete] ==== evaluate_data_frame Evaluates the data frame analytics for an annotated index. +The API packages together commonly used evaluation metrics for various types +of machine learning features. This has been designed for use on indexes +created by data frame analytics. Evaluation requires both a ground truth +field and an analytics result field to be present. {ref}/evaluate-dfanalytics.html[Endpoint documentation] [source,ts] @@ -3343,11 +3576,19 @@ client.ml.evaluateDataFrame({ evaluation, index }) * *Request (object):* ** *`evaluation` ({ classification, outlier_detection, regression })*: Defines the type of evaluation you want to perform. ** *`index` (string)*: Defines the `index` in which the evaluation will be performed. 
-** *`query` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: A query clause that retrieves a subset of data from the source index. +** *`query` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: A query clause that retrieves a subset of data from the source index. [discrete] ==== flush_job Forces any buffered data to be processed by the job. +The flush jobs API is only applicable when sending data for analysis using +the post data API. Depending on the content of the buffer, then it might +additionally calculate new results. Both flush and close operations are +similar, however the flush is more efficient if you are expecting to send +more data for analysis. 
When flushing, the job remains open and is available +to continue analyzing data. A close operation additionally prunes and +persists the model state to disk and the job must be opened again before +analyzing further data. {ref}/ml-flush-job.html[Endpoint documentation] [source,ts] @@ -3409,6 +3650,9 @@ client.ml.getCalendars({ ... }) [discrete] ==== get_data_frame_analytics Retrieves configuration information for data frame analytics jobs. +You can get information for multiple data frame analytics jobs in a single +API request by using a comma-separated list of data frame analytics jobs or a +wildcard expression. {ref}/get-dfanalytics.html[Endpoint documentation] [source,ts] @@ -3475,6 +3719,12 @@ there are no matches or only partial matches. [discrete] ==== get_datafeed_stats Retrieves usage information for datafeeds. +You can get statistics for multiple datafeeds in a single API request by +using a comma-separated list of datafeeds or a wildcard expression. You can +get statistics for all datafeeds by using `_all`, by specifying `*` as the +``, or by omitting the ``. If the datafeed is stopped, the +only information you receive is the `datafeed_id` and the `state`. +This API returns a maximum of 10,000 datafeeds. {ref}/ml-get-datafeed-stats.html[Endpoint documentation] [source,ts] @@ -3503,6 +3753,11 @@ partial matches. If this parameter is `false`, the request returns a [discrete] ==== get_datafeeds Retrieves configuration information for datafeeds. +You can get information for multiple datafeeds in a single API request by +using a comma-separated list of datafeeds or a wildcard expression. You can +get information for all datafeeds by using `_all`, by specifying `*` as the +``, or by omitting the ``. +This API returns a maximum of 10,000 datafeeds. {ref}/ml-get-datafeed.html[Endpoint documentation] [source,ts] @@ -3534,6 +3789,7 @@ be retrieved and then added to another cluster. [discrete] ==== get_filters Retrieves filters. 
+You can get a single filter or all filters. {ref}/ml-get-filter.html[Endpoint documentation] [source,ts] @@ -3581,6 +3837,10 @@ code when there are no matches or only partial matches. [discrete] ==== get_jobs Retrieves configuration information for anomaly detection jobs. +You can get information for multiple anomaly detection jobs in a single API +request by using a group name, a comma-separated list of jobs, or a wildcard +expression. You can get information for all anomaly detection jobs by using +`_all`, by specifying `*` as the ``, or by omitting the ``. {ref}/ml-get-job.html[Endpoint documentation] [source,ts] @@ -3611,7 +3871,23 @@ be retrieved and then added to another cluster. [discrete] ==== get_overall_buckets -Retrieves overall bucket results that summarize the bucket results of multiple anomaly detection jobs. +Retrieves overall bucket results that summarize the bucket results of +multiple anomaly detection jobs. + +The `overall_score` is calculated by combining the scores of all the +buckets within the overall bucket span. First, the maximum +`anomaly_score` per anomaly detection job in the overall bucket is +calculated. Then the `top_n` of those scores are averaged to result in +the `overall_score`. This means that you can fine-tune the +`overall_score` so that it is more or less sensitive to the number of +jobs that detect an anomaly at the same time. For example, if you set +`top_n` to `1`, the `overall_score` is the maximum bucket score in the +overall bucket. Alternatively, if you set `top_n` to the number of jobs, +the `overall_score` is high only when all jobs detect anomalies in that +overall bucket. If you set the `bucket_span` parameter (to a value +greater than its default), the `overall_score` is the maximum +`overall_score` of the overall buckets that have a span equal to the +jobs' largest bucket span. {ref}/ml-get-overall-buckets.html[Endpoint documentation] [source,ts] @@ -3639,7 +3915,7 @@ using `_all` or by specifying `*` as the ``. 
[discrete] ==== get_trained_models -Retrieves configuration information for a trained inference model. +Retrieves configuration information for a trained model. {ref}/get-trained-models.html[Endpoint documentation] [source,ts] @@ -3679,7 +3955,8 @@ tags are returned. [discrete] ==== get_trained_models_stats -Retrieves usage information for trained inference models. +Retrieves usage information for trained models. You can get usage information for multiple trained +models in a single API request by using a comma-separated list of model IDs or a wildcard expression. {ref}/get-trained-models-stats.html[Endpoint documentation] [source,ts] @@ -3706,7 +3983,7 @@ subset of results when there are partial matches. [discrete] ==== infer_trained_model -Evaluate a trained model. +Evaluates a trained model. {ref}/infer-trained-model.html[Endpoint documentation] [source,ts] @@ -3727,7 +4004,14 @@ Currently, for NLP models, only a single value is allowed. [discrete] ==== open_job -Opens one or more anomaly detection jobs. +Open anomaly detection jobs. +An anomaly detection job must be opened in order for it to be ready to +receive and analyze data. It can be opened and closed multiple times +throughout its lifecycle. +When you open a new job, it starts with an empty model. +When you open an existing job, the most recent model state is automatically +loaded. The job is ready to resume its analysis from where it left off, once +new data is received. {ref}/ml-open-job.html[Endpoint documentation] [source,ts] @@ -3744,7 +4028,7 @@ client.ml.openJob({ job_id }) [discrete] ==== post_calendar_events -Posts scheduled events in a calendar. +Adds scheduled events to a calendar. {ref}/ml-post-calendar-event.html[Endpoint documentation] [source,ts] @@ -3761,7 +4045,7 @@ client.ml.postCalendarEvents({ calendar_id, events }) [discrete] ==== preview_data_frame_analytics -Previews that will be analyzed given a data frame analytics config. 
+Previews the extracted features used by a data frame analytics config. {ref}/preview-dfanalytics.html[Endpoint documentation] [source,ts] @@ -3781,6 +4065,14 @@ this API. [discrete] ==== preview_datafeed Previews a datafeed. +This API returns the first "page" of search results from a datafeed. +You can preview an existing datafeed or provide configuration details for a datafeed +and anomaly detection job in the API. The preview shows the structure of the data +that will be passed to the anomaly detection engine. +IMPORTANT: When Elasticsearch security features are enabled, the preview uses the credentials of the user that +called the API. However, when the datafeed starts it uses the roles of the last user that created or updated the +datafeed. To get a preview that accurately reflects the behavior of the datafeed, use the appropriate credentials. +You can also use secondary authorization headers to supply the credentials. {ref}/ml-preview-datafeed.html[Endpoint documentation] [source,ts] @@ -3806,7 +4098,7 @@ used. You cannot specify a `job_config` object unless you also supply a `datafee [discrete] ==== put_calendar -Instantiates a calendar. +Creates a calendar. {ref}/ml-put-calendar.html[Endpoint documentation] [source,ts] @@ -3842,6 +4134,8 @@ client.ml.putCalendarJob({ calendar_id, job_id }) [discrete] ==== put_data_frame_analytics Instantiates a data frame analytics job. +This API creates a data frame analytics job that performs an analysis on the +source indices and stores the outcome in a destination index. {ref}/put-dfanalytics.html[Endpoint documentation] [source,ts] @@ -3907,10 +4201,21 @@ analytical processing. If your `elasticsearch.yml` file contains an `xpack.ml.max_model_memory_limit` setting, an error occurs when you try to create data frame analytics jobs that have `model_memory_limit` values greater than that setting. +** *`headers` (Optional, Record)* +** *`version` (Optional, string)* [discrete] ==== put_datafeed Instantiates a datafeed. 
+Datafeeds retrieve data from Elasticsearch for analysis by an anomaly detection job. +You can associate only one datafeed with each anomaly detection job. +The datafeed contains a query that runs at a defined interval (`frequency`). +If you are concerned about delayed data, you can add a delay (`query_delay`) at each interval. +When Elasticsearch security features are enabled, your datafeed remembers which roles the user who created it had +at the time of creation and runs the query using those same roles. If you provide secondary authorization headers, +those credentials are used instead. +You must use Kibana, this API, or the create anomaly detection jobs API to create a datafeed. Do not add a datafeed +directly to the `.ml-config` index. Do not give users `write` privileges on the `.ml-config` index. {ref}/ml-put-datafeed.html[Endpoint documentation] [source,ts] @@ -3925,7 +4230,7 @@ client.ml.putDatafeed({ datafeed_id }) ** *`datafeed_id` (string)*: A numerical character string that uniquely identifies the datafeed. This identifier can contain lowercase alphanumeric characters (a-z and 0-9), hyphens, and underscores. It must start and end with alphanumeric characters. -** *`aggregations` (Optional, Record)*: If set, the datafeed performs aggregation searches. +** *`aggregations` (Optional, Record)*: If set, the datafeed performs aggregation searches. Support for aggregations is limited and should be used only with low cardinality data. ** *`chunking_config` (Optional, { mode, time_span })*: Datafeeds might be required to search over long time periods, for several months or years. This search is split into time chunks in order to ensure the load on Elasticsearch is managed. @@ -3949,7 +4254,7 @@ learning nodes must have the `remote_cluster_client` role. stops and closes the associated job after this many real-time searches return no documents. In other words, it stops after `frequency` times `max_empty_searches` of real-time operation.
If not set, a datafeed with no end time that sees no data remains started until it is explicitly stopped. By default, it is not set. -** *`query` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: The Elasticsearch query domain-specific language (DSL). This value corresponds to the query object in an +** *`query` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: The Elasticsearch query domain-specific language (DSL). This value corresponds to the query object in an Elasticsearch search POST body. All the options that are supported by Elasticsearch can be used, as this object is passed verbatim to Elasticsearch. ** *`query_delay` (Optional, string | -1 | 0)*: The number of seconds behind real time that data is queried. 
For example, if data from 10:04 a.m. might @@ -3961,6 +4266,7 @@ when there are multiple jobs running on the same node. The detector configuration objects in a job can contain functions that use these script fields. ** *`scroll_size` (Optional, number)*: The size parameter that is used in Elasticsearch searches when the datafeed does not use aggregations. The maximum value is the value of `index.max_result_window`, which is 10,000 by default. +** *`headers` (Optional, Record)* ** *`allow_no_indices` (Optional, boolean)*: If true, wildcard indices expressions that resolve into no concrete indices are ignored. This includes the `_all` string or when no indices are specified. ** *`expand_wildcards` (Optional, Enum("all" | "open" | "closed" | "hidden" | "none") | Enum("all" | "open" | "closed" | "hidden" | "none")[])*: Type of index that wildcard patterns can match. If the request can target data streams, this argument determines @@ -3971,6 +4277,8 @@ whether wildcard expressions match hidden data streams. Supports a list of value [discrete] ==== put_filter Instantiates a filter. +A filter contains a list of strings. It can be used by one or more anomaly detection jobs. +Specifically, filters are referenced in the `custom_rules` property of detector configuration objects. {ref}/ml-put-filter.html[Endpoint documentation] [source,ts] @@ -3989,7 +4297,8 @@ Up to 10000 items are allowed in each filter. [discrete] ==== put_job -Instantiates an anomaly detection job. +Create an anomaly detection job. +If you include a `datafeed_config`, you must have read index privileges on the source index. 
{ref}/ml-put-job.html[Endpoint documentation] [source,ts] @@ -4012,7 +4321,7 @@ client.ml.putJob({ job_id, analysis_config, data_description }) ** *`datafeed_config` (Optional, { aggregations, chunking_config, datafeed_id, delayed_data_check_config, frequency, indices, indices_options, job_id, max_empty_searches, query, query_delay, runtime_mappings, script_fields, scroll_size })*: Defines a datafeed for the anomaly detection job. If Elasticsearch security features are enabled, your datafeed remembers which roles the user who created it had at the time of creation and runs the query using those same roles. If you provide secondary authorization headers, those credentials are used instead. ** *`description` (Optional, string)*: A description of the job. ** *`groups` (Optional, string[])*: A list of job groups. A job can belong to no groups or many. -** *`model_plot_config` (Optional, { enabled })*: This advanced configuration option stores model information along with the results. It provides a more detailed view into anomaly detection. If you enable model plot it can add considerable overhead to the performance of the system; it is not feasible for jobs with many entities. Model plot provides a simplified and indicative view of the model and its bounds. It does not display complex features such as multivariate correlations or multimodal data. As such, anomalies may occasionally be reported which cannot be seen in the model plot. Model plot config can be configured when the job is created or updated later. It must be disabled if performance issues are experienced. +** *`model_plot_config` (Optional, { annotations_enabled, enabled, terms })*: This advanced configuration option stores model information along with the results. It provides a more detailed view into anomaly detection. If you enable model plot it can add considerable overhead to the performance of the system; it is not feasible for jobs with many entities. 
Model plot provides a simplified and indicative view of the model and its bounds. It does not display complex features such as multivariate correlations or multimodal data. As such, anomalies may occasionally be reported which cannot be seen in the model plot. Model plot config can be configured when the job is created or updated later. It must be disabled if performance issues are experienced. ** *`model_snapshot_retention_days` (Optional, number)*: Advanced configuration option, which affects the automatic removal of old model snapshots for this job. It specifies the maximum period of time (in days) that snapshots are retained. This period is relative to the timestamp of the most recent snapshot for this job. By default, snapshots ten days older than the newest snapshot are deleted. ** *`renormalization_window_days` (Optional, number)*: Advanced configuration option. The period over which adjustments to the score are applied, as new data is seen. The default value is the longer of 30 days or 100 bucket spans. ** *`results_index_name` (Optional, string)*: A text string that affects the name of the machine learning results index. By default, the job generates an index named `.ml-anomalies-shared`. @@ -4020,7 +4329,7 @@ client.ml.putJob({ job_id, analysis_config, data_description }) [discrete] ==== put_trained_model -Creates an inference trained model. +Enables you to supply a trained model that is not created by data frame analytics. {ref}/put-trained-models.html[Endpoint documentation] [source,ts] @@ -4039,7 +4348,7 @@ specified. ** *`definition` (Optional, { preprocessors, trained_model })*: The inference definition for the model. If definition is specified, then compressed_definition cannot be specified. ** *`description` (Optional, string)*: A human-readable description of the inference trained model. -** *`inference_config` (Optional, { regression, classification })*: The default configuration for inference. 
This can be either a regression +** *`inference_config` (Optional, { regression, classification, text_classification, zero_shot_classification, fill_mask, ner, pass_through, text_embedding, text_expansion, question_answering })*: The default configuration for inference. This can be either a regression or classification configuration. It must match the underlying definition.trained_model's target_type. For pre-packaged models such as ELSER the config is not required. @@ -4057,10 +4366,31 @@ so one of, `linux-x86_64`, `linux-aarch64`, `darwin-x86_64`, `darwin-aarch64`, or `windows-x86_64`. For portable models (those that work independent of processor architecture or OS features), leave this field unset. ** *`tags` (Optional, string[])*: An array of tags to organize the model. +** *`prefix_strings` (Optional, { ingest, search })*: Optional prefix strings applied at inference +** *`defer_definition_decompression` (Optional, boolean)*: If set to `true` and a `compressed_definition` is provided, +the request defers definition decompression and skips relevant +validations. +** *`wait_for_completion` (Optional, boolean)*: Whether to wait for all child operations (e.g. model download) +to complete. [discrete] ==== put_trained_model_alias -Creates a new model alias (or reassigns an existing one) to refer to the trained model +Creates or updates a trained model alias. A trained model alias is a logical +name used to reference a single trained model. +You can use aliases instead of trained model identifiers to make it easier to +reference your models. For example, you can use aliases in inference +aggregations and processors. +An alias must be unique and refer to only a single trained model. However, +you can have multiple aliases for each trained model. +If you use this API to update an alias such that it references a different +trained model ID and the model uses a different type of data frame analytics, +an error occurs. 
For example, this situation occurs if you have a trained +model for regression analysis and a trained model for classification +analysis; you cannot reassign an alias from one type of trained model to +another. +If you use this API to update an alias and there are very few input fields in +common between the old and new trained models for the model alias, the API +returns a warning. {ref}/put-trained-models-aliases.html[Endpoint documentation] [source,ts] @@ -4080,7 +4410,7 @@ already assigned and this parameter is false, the API returns an error. [discrete] ==== put_trained_model_definition_part -Creates part of a trained model definition +Creates part of a trained model definition. {ref}/put-trained-model-definition-part.html[Endpoint documentation] [source,ts] @@ -4101,7 +4431,9 @@ order of their part number. The first part must be `0` and the final part must b [discrete] ==== put_trained_model_vocabulary -Creates a trained model vocabulary +Creates a trained model vocabulary. +This API is supported only for natural language processing (NLP) models. +The vocabulary is stored in the index as described in `inference_config.*.vocabulary` of the trained model definition. {ref}/put-trained-model-vocabulary.html[Endpoint documentation] [source,ts] @@ -4115,10 +4447,16 @@ client.ml.putTrainedModelVocabulary({ model_id, vocabulary }) * *Request (object):* ** *`model_id` (string)*: The unique identifier of the trained model. ** *`vocabulary` (string[])*: The model vocabulary, which must not be empty. +** *`merges` (Optional, string[])*: The optional model merges if required by the tokenizer. +** *`scores` (Optional, number[])*: The optional vocabulary value scores if required by the tokenizer. [discrete] ==== reset_job -Resets an existing anomaly detection job. +Resets an anomaly detection job. +All model state and results are deleted. The job is ready to start over as if +it had just been created. 
+It is not currently possible to reset multiple jobs using wildcards or a +comma separated list. {ref}/ml-reset-job.html[Endpoint documentation] [source,ts] @@ -4140,6 +4478,17 @@ reset. [discrete] ==== start_data_frame_analytics Starts a data frame analytics job. +A data frame analytics job can be started and stopped multiple times +throughout its lifecycle. +If the destination index does not exist, it is created automatically the +first time you start the data frame analytics job. The +`index.number_of_shards` and `index.number_of_replicas` settings for the +destination index are copied from the source index. If there are multiple +source indices, the destination index copies the highest setting values. The +mappings for the destination index are also copied from the source indices. +If there are any mapping conflicts, the job fails to start. +If the destination index exists, it is used as is. You can therefore set up +the destination index in advance with custom settings and mappings. {ref}/start-dfanalytics.html[Endpoint documentation] [source,ts] @@ -4161,6 +4510,18 @@ starts. ==== start_datafeed Starts one or more datafeeds. +A datafeed must be started in order to retrieve data from Elasticsearch. A datafeed can be started and stopped +multiple times throughout its lifecycle. + +Before you can start a datafeed, the anomaly detection job must be open. Otherwise, an error occurs. + +If you restart a stopped datafeed, it continues processing input data from the next millisecond after it was stopped. +If new data was indexed for that exact millisecond between stopping and starting, it will be ignored. + +When Elasticsearch security features are enabled, your datafeed remembers which roles the last user to create or +update it had at the time of creation or update and runs the query using those same roles. If you provided secondary +authorization headers when you created or updated the datafeed, those credentials are used instead. 
+ {ref}/ml-start-datafeed.html[Endpoint documentation] [source,ts] ---- @@ -4180,7 +4541,7 @@ characters. [discrete] ==== start_trained_model_deployment -Start a trained model deployment. +Starts a trained model deployment, which allocates the model to every machine learning node. {ref}/start-trained-model-deployment.html[Endpoint documentation] [source,ts] @@ -4216,6 +4577,8 @@ it will automatically be changed to a value less than the number of hardware thr [discrete] ==== stop_data_frame_analytics Stops one or more data frame analytics jobs. +A data frame analytics job can be started and stopped multiple times +throughout its lifecycle. {ref}/stop-dfanalytics.html[Endpoint documentation] [source,ts] @@ -4248,6 +4611,8 @@ stops. Defaults to 20 seconds. [discrete] ==== stop_datafeed Stops one or more datafeeds. +A datafeed that is stopped ceases to retrieve data from Elasticsearch. A datafeed can be started and stopped +multiple times throughout its lifecycle. {ref}/ml-stop-datafeed.html[Endpoint documentation] [source,ts] @@ -4268,7 +4633,7 @@ the identifier. [discrete] ==== stop_trained_model_deployment -Stop a trained model deployment. +Stops a trained model deployment. {ref}/stop-trained-model-deployment.html[Endpoint documentation] [source,ts] @@ -4290,7 +4655,7 @@ restart the model deployment. [discrete] ==== update_data_frame_analytics -Updates certain properties of a data frame analytics job. +Updates an existing data frame analytics job. {ref}/update-dfanalytics.html[Endpoint documentation] [source,ts] @@ -4320,7 +4685,11 @@ learning node capacity for it to be immediately assigned to a node. [discrete] ==== update_datafeed -Updates certain properties of a datafeed. +Updates the properties of a datafeed. +You must stop and start the datafeed for the changes to be applied. +When Elasticsearch security features are enabled, your datafeed remembers which roles the user who updated it had at +the time of the update and runs the query using those same roles. 
If you provide secondary authorization headers, +those credentials are used instead. {ref}/ml-update-datafeed.html[Endpoint documentation] [source,ts] @@ -4335,7 +4704,7 @@ client.ml.updateDatafeed({ datafeed_id }) ** *`datafeed_id` (string)*: A numerical character string that uniquely identifies the datafeed. This identifier can contain lowercase alphanumeric characters (a-z and 0-9), hyphens, and underscores. It must start and end with alphanumeric characters. -** *`aggregations` (Optional, Record)*: If set, the datafeed performs aggregation searches. Support for aggregations is limited and should be used only +** *`aggregations` (Optional, Record)*: If set, the datafeed performs aggregation searches. Support for aggregations is limited and should be used only with low cardinality data. ** *`chunking_config` (Optional, { mode, time_span })*: Datafeeds might search over long time periods, for several months or years. This search is split into time chunks in order to ensure the load on Elasticsearch is managed. Chunking configuration controls how the size of @@ -4358,7 +4727,7 @@ learning nodes must have the `remote_cluster_client` role. stops and closes the associated job after this many real-time searches return no documents. In other words, it stops after `frequency` times `max_empty_searches` of real-time operation. If not set, a datafeed with no end time that sees no data remains started until it is explicitly stopped. By default, it is not set. 
-** *`query` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: The Elasticsearch query domain-specific language (DSL). This value corresponds to the query object in an +** *`query` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: The Elasticsearch query domain-specific language (DSL). This value corresponds to the query object in an Elasticsearch search POST body. All the options that are supported by Elasticsearch can be used, as this object is passed verbatim to Elasticsearch. Note that if you change the query, the analyzed data is also changed. Therefore, the time required to learn might be long and the understandability of the results is @@ -4389,7 +4758,7 @@ whether wildcard expressions match hidden data streams. 
Supports a list of value [discrete] ==== update_filter -Updates the description of a filter, adds items, or removes items. +Updates the description of a filter, adds items, or removes items from the list. {ref}/ml-update-filter.html[Endpoint documentation] [source,ts] ---- @@ -4446,7 +4815,7 @@ For example, it can contain custom URL information as shown in Adding custom URLs to machine learning results. ** *`categorization_filters` (Optional, string[])* ** *`description` (Optional, string)*: A description of the job. -** *`model_plot_config` (Optional, { enabled })* +** *`model_plot_config` (Optional, { annotations_enabled, enabled, terms })* ** *`model_prune_window` (Optional, string | -1 | 0)* ** *`daily_model_snapshot_retention_after_days` (Optional, number)*: Advanced configuration option, which affects the automatic removal of old model snapshots for this job. It specifies a period of time (in days) @@ -4473,7 +4842,7 @@ value is null, which means all results are retained. [discrete] ==== update_trained_model_deployment -Updates certain properties of trained model deployment. +Updates certain properties of an existing trained model deployment. {ref}/update-trained-model-deployment.html[Endpoint documentation] [source,ts] @@ -4494,15 +4863,32 @@ If this setting is greater than the number of hardware threads it will automatically be changed to a value less than the number of hardware threads. [discrete] -=== query_ruleset +=== query_rules [discrete] -==== delete +==== delete_rule +Deletes a query rule within a query ruleset.
+ +{ref}/delete-query-rule.html[Endpoint documentation] +[source,ts] +---- +client.queryRules.deleteRule({ ruleset_id, rule_id }) +---- + +[discrete] +==== Arguments + +* *Request (object):* +** *`ruleset_id` (string)*: The unique identifier of the query ruleset containing the rule to delete +** *`rule_id` (string)*: The unique identifier of the query rule within the specified ruleset to delete + +[discrete] +==== delete_ruleset Deletes a query ruleset. {ref}/delete-query-ruleset.html[Endpoint documentation] [source,ts] ---- -client.queryRuleset.delete({ ruleset_id }) +client.queryRules.deleteRuleset({ ruleset_id }) ---- [discrete] @@ -4512,13 +4898,30 @@ client.queryRuleset.delete({ ruleset_id }) ** *`ruleset_id` (string)*: The unique identifier of the query ruleset to delete [discrete] -==== get -Returns the details about a query ruleset. +==== get_rule +Returns the details about a query rule within a query ruleset + +{ref}/get-query-rule.html[Endpoint documentation] +[source,ts] +---- +client.queryRules.getRule({ ruleset_id, rule_id }) +---- + +[discrete] +==== Arguments + +* *Request (object):* +** *`ruleset_id` (string)*: The unique identifier of the query ruleset containing the rule to retrieve +** *`rule_id` (string)*: The unique identifier of the query rule within the specified ruleset to retrieve + +[discrete] +==== get_ruleset +Returns the details about a query ruleset {ref}/get-query-ruleset.html[Endpoint documentation] [source,ts] ---- -client.queryRuleset.get({ ruleset_id }) +client.queryRules.getRuleset({ ruleset_id }) ---- [discrete] @@ -4528,13 +4931,13 @@ client.queryRuleset.get({ ruleset_id }) ** *`ruleset_id` (string)*: The unique identifier of the query ruleset [discrete] -==== list -Lists query rulesets. +==== list_rulesets +Returns summarized information about existing query rulesets. {ref}/list-query-rulesets.html[Endpoint documentation] [source,ts] ---- -client.queryRuleset.list({ ... }) +client.queryRules.listRulesets({ ... 
}) ---- [discrete] @@ -4545,13 +4948,34 @@ client.queryRuleset.list({ ... }) ** *`size` (Optional, number)*: specifies a max number of results to get [discrete] -==== put +==== put_rule +Creates or updates a query rule within a query ruleset. + +{ref}/put-query-rule.html[Endpoint documentation] +[source,ts] +---- +client.queryRules.putRule({ ruleset_id, rule_id, type, criteria, actions }) +---- + +[discrete] +==== Arguments + +* *Request (object):* +** *`ruleset_id` (string)*: The unique identifier of the query ruleset containing the rule to be created or updated +** *`rule_id` (string)*: The unique identifier of the query rule within the specified ruleset to be created or updated +** *`type` (Enum("pinned"))* +** *`criteria` ({ type, metadata, values } | { type, metadata, values }[])* +** *`actions` ({ ids, docs })* +** *`priority` (Optional, number)* + +[discrete] +==== put_ruleset Creates or updates a query ruleset. {ref}/put-query-ruleset.html[Endpoint documentation] [source,ts] ---- -client.queryRuleset.put({ ruleset_id, rules }) +client.queryRules.putRuleset({ ruleset_id, rules }) ---- [discrete] @@ -4559,7 +4983,7 @@ client.queryRuleset.put({ ruleset_id, rules }) * *Request (object):* ** *`ruleset_id` (string)*: The unique identifier of the query ruleset to be created or updated -** *`rules` ({ rule_id, type, criteria, actions }[])* +** *`rules` ({ rule_id, type, criteria, actions, priority } | { rule_id, type, criteria, actions, priority }[])* [discrete] === search_application @@ -4597,7 +5021,7 @@ client.searchApplication.deleteBehavioralAnalytics({ name }) [discrete] ==== get -Returns the details about a search application. +Returns the details about a search application {ref}/get-search-application.html[Endpoint documentation] [source,ts] @@ -4681,7 +5105,7 @@ client.searchApplication.putBehavioralAnalytics({ name }) [discrete] ==== search -Perform a search against a search application +Perform a search against a search application. 
{ref}/search-application-search.html[Endpoint documentation] [source,ts] @@ -4695,12 +5119,17 @@ client.searchApplication.search({ name }) * *Request (object):* ** *`name` (string)*: The name of the search application to be searched. ** *`params` (Optional, Record)*: Query parameters specific to this request, which will override any defaults specified in the template. +** *`typed_keys` (Optional, boolean)*: Determines whether aggregation names are prefixed by their respective types in the response. [discrete] === security [discrete] ==== authenticate -Enables authentication as a user and retrieve information about the authenticated user. +Authenticate a user. +Authenticates a user and returns information about the authenticated user. +Include the user information in a [basic auth header](https://en.wikipedia.org/wiki/Basic_access_authentication). +A successful call returns a JSON structure that shows user information such as their username, the roles that are assigned to the user, any assigned metadata, and information about the realms that authenticated and authorized the user. +If the user cannot be authenticated, this API returns a 401 status code. {ref}/security-api-authenticate.html[Endpoint documentation] [source,ts] @@ -4711,7 +5140,11 @@ client.security.authenticate() [discrete] ==== create_api_key +Create an API key. Creates an API key for access without requiring basic authentication. +A successful request returns a JSON structure that contains the API key, its unique id, and its name. +If applicable, it also returns expiration information for the API key in milliseconds. +NOTE: By default, API keys never expire. You can specify expiration information when you create the API keys. {ref}/security-api-create-api-key.html[Endpoint documentation] [source,ts] @@ -4725,12 +5158,16 @@ client.security.createApiKey({ ... }) * *Request (object):* ** *`expiration` (Optional, string | -1 | 0)*: Expiration time for the API key. By default, API keys never expire. 
** *`name` (Optional, string)*: Specifies the name for this API key. -** *`role_descriptors` (Optional, Record)*: An array of role descriptors for this API key. This parameter is optional. When it is not specified or is an empty array, then the API key will have a point in time snapshot of permissions of the authenticated user. If you supply role descriptors then the resultant permissions would be an intersection of API keys permissions and authenticated user’s permissions thereby limiting the access scope for API keys. The structure of role descriptor is the same as the request for create role API. For more details, see create or update roles API. +** *`role_descriptors` (Optional, Record)*: An array of role descriptors for this API key. This parameter is optional. When it is not specified or is an empty array, then the API key will have a point in time snapshot of permissions of the authenticated user. If you supply role descriptors then the resultant permissions would be an intersection of API keys permissions and authenticated user’s permissions thereby limiting the access scope for API keys. The structure of role descriptor is the same as the request for create role API. For more details, see create or update roles API. +** *`metadata` (Optional, Record)*: Arbitrary metadata that you want to associate with the API key. It supports nested data structure. Within the metadata object, keys beginning with `_` are reserved for system usage. ** *`refresh` (Optional, Enum(true | false | "wait_for"))*: If `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes. [discrete] ==== get_api_key +Get API key information. Retrieves information for one or more API keys. +NOTE: If you have only the `manage_own_api_key` privilege, this API returns only the API keys that you own. 
+If you have `read_security`, `manage_api_key` or greater privileges (including `manage_security`), this API returns all API keys regardless of ownership. {ref}/security-api-get-api-key.html[Endpoint documentation] [source,ts] @@ -4753,9 +5190,16 @@ The `realm_name` or `username` parameters cannot be specified when this paramete This parameter cannot be used with either `id` or `name` or when `owner` flag is set to `true`. ** *`username` (Optional, string)*: The username of a user. This parameter cannot be used with either `id` or `name` or when `owner` flag is set to `true`. +** *`with_limited_by` (Optional, boolean)*: Return the snapshot of the owner user's role descriptors +associated with the API key. An API key's actual +permission is the intersection of its assigned role +descriptors and the owner user's role descriptors. +** *`active_only` (Optional, boolean)*: A boolean flag that can be used to query API keys that are currently active. An API key is considered active if it is neither invalidated, nor expired at query time. You can specify this together with other parameters such as `owner` or `name`. If `active_only` is false, the response will include both active and inactive (expired or invalidated) keys. +** *`with_profile_uid` (Optional, boolean)*: Determines whether to also retrieve the profile uid, for the API key owner principal, if it exists. [discrete] ==== has_privileges +Check user privileges. Determines whether the specified user has a specified list of privileges. {ref}/security-api-has-privileges.html[Endpoint documentation] @@ -4770,12 +5214,19 @@ client.security.hasPrivileges({ ... 
}) * *Request (object):* ** *`user` (Optional, string)*: Username ** *`application` (Optional, { application, privileges, resources }[])* -** *`cluster` (Optional, Enum("all" | "cancel_task" | "create_snapshot" | "cross_cluster_replication" | "cross_cluster_search" | "delegate_pki" | "grant_api_key" | "manage" | "manage_api_key" | "manage_autoscaling" | "manage_behavioral_analytics" | "manage_ccr" | "manage_data_frame_transforms" | "manage_data_stream_global_retention" | "manage_enrich" | "manage_ilm" | "manage_index_templates" | "manage_inference" | "manage_ingest_pipelines" | "manage_logstash_pipelines" | "manage_ml" | "manage_oidc" | "manage_own_api_key" | "manage_pipeline" | "manage_rollup" | "manage_saml" | "manage_search_application" | "manage_search_query_rules" | "manage_search_synonyms" | "manage_security" | "manage_service_account" | "manage_slm" | "manage_token" | "manage_transform" | "manage_user_profile" | "manage_watcher" | "monitor" | "monitor_data_frame_transforms" | "monitor_data_stream_global_retention" | "monitor_enrich" | "monitor_inference" | "monitor_ml" | "monitor_rollup" | "monitor_snapshot" | "monitor_text_structure" | "monitor_transform" | "monitor_watcher" | "none" | "post_behavioral_analytics_event" | "read_ccr" | "read_connector_secrets" | "read_fleet_secrets" | "read_ilm" | "read_pipeline" | "read_security" | "read_slm" | "transport_client" | "write_connector_secrets" | "write_fleet_secrets")[])*: A list of the cluster privileges that you want to check. 
+** *`cluster` (Optional, Enum("all" | "cancel_task" | "create_snapshot" | "cross_cluster_replication" | "cross_cluster_search" | "delegate_pki" | "grant_api_key" | "manage" | "manage_api_key" | "manage_autoscaling" | "manage_behavioral_analytics" | "manage_ccr" | "manage_data_frame_transforms" | "manage_data_stream_global_retention" | "manage_enrich" | "manage_ilm" | "manage_index_templates" | "manage_inference" | "manage_ingest_pipelines" | "manage_logstash_pipelines" | "manage_ml" | "manage_oidc" | "manage_own_api_key" | "manage_pipeline" | "manage_rollup" | "manage_saml" | "manage_search_application" | "manage_search_query_rules" | "manage_search_synonyms" | "manage_security" | "manage_service_account" | "manage_slm" | "manage_token" | "manage_transform" | "manage_user_profile" | "manage_watcher" | "monitor" | "monitor_data_frame_transforms" | "monitor_data_stream_global_retention" | "monitor_enrich" | "monitor_inference" | "monitor_ml" | "monitor_rollup" | "monitor_snapshot" | "monitor_text_structure" | "monitor_transform" | "monitor_watcher" | "none" | "post_behavioral_analytics_event" | "read_ccr" | "read_fleet_secrets" | "read_ilm" | "read_pipeline" | "read_security" | "read_slm" | "transport_client" | "write_connector_secrets" | "write_fleet_secrets")[])*: A list of the cluster privileges that you want to check. ** *`index` (Optional, { names, privileges, allow_restricted_indices }[])* [discrete] ==== invalidate_api_key +Invalidate API keys. Invalidates one or more API keys. +The `manage_api_key` privilege allows deleting any API keys. +The `manage_own_api_key` only allows deleting API keys that are owned by the user. +In addition, with the `manage_own_api_key` privilege, an invalidation request must be issued in one of the three formats: +- Set the parameter `owner=true`. +- Or, set both `username` and `realm_name` to match the user’s identity. +- Or, if the request is issued by an API key, i.e. 
an API key invalidates itself, specify its ID in the `ids` field. {ref}/security-api-invalidate-api-key.html[Endpoint documentation] [source,ts] @@ -4801,7 +5252,8 @@ This parameter cannot be used with either `ids` or `name`, or when `owner` flag [discrete] ==== query_api_keys -Retrieves information for API keys using a subset of query DSL +Query API keys. +Retrieves a paginated list of API keys and their information. You can optionally filter the results with a query. {ref}/security-api-query-api-key.html[Endpoint documentation] [source,ts] @@ -4833,10 +5285,27 @@ In addition, sort can also be applied to the `_doc` field to sort by index order By default, you cannot page through more than 10,000 hits using the `from` and `size` parameters. To page through more hits, use the `search_after` parameter. ** *`search_after` (Optional, number | number | string | boolean | null | User-defined value[])*: Search after definition +** *`with_limited_by` (Optional, boolean)*: Return the snapshot of the owner user's role descriptors associated with the API key. +An API key's actual permission is the intersection of its assigned role descriptors and the owner user's role descriptors. +** *`with_profile_uid` (Optional, boolean)*: Determines whether to also retrieve the profile uid, for the API key owner principal, if it exists. +** *`typed_keys` (Optional, boolean)*: Determines whether aggregation names are prefixed by their respective types in the response. [discrete] ==== update_api_key +Update an API key. Updates attributes of an existing API key. +Users can only update API keys that they created or that were granted to them. +Use this API to update API keys created by the create API Key or grant API Key APIs. +If you need to apply the same update to many API keys, you can use bulk update API Keys to reduce overhead. +It’s not possible to update expired API keys, or API keys that have been invalidated by invalidate API Key. 
+This API supports updates to an API key’s access scope and metadata. +The access scope of an API key is derived from the `role_descriptors` you specify in the request, and a snapshot of the owner user’s permissions at the time of the request. +The snapshot of the owner’s permissions is updated automatically on every call. +If you don’t specify `role_descriptors` in the request, a call to this API might still change the API key’s access scope. +This change can occur if the owner user’s permissions have changed since the API key was created or last modified. +To update another user’s API key, use the `run_as` feature to submit a request on behalf of another user. +IMPORTANT: It’s not possible to use an API key as the authentication credential for this API. +To update an API key, the owner user’s credentials are required. {ref}/security-api-update-api-key.html[Endpoint documentation] [source,ts] @@ -4849,7 +5318,7 @@ client.security.updateApiKey({ id }) * *Request (object):* ** *`id` (string)*: The ID of the API key to update. -** *`role_descriptors` (Optional, Record)*: An array of role descriptors for this API key. This parameter is optional. When it is not specified or is an empty array, then the API key will have a point in time snapshot of permissions of the authenticated user. If you supply role descriptors then the resultant permissions would be an intersection of API keys permissions and authenticated user’s permissions thereby limiting the access scope for API keys. The structure of role descriptor is the same as the request for create role API. For more details, see create or update roles API. +** *`role_descriptors` (Optional, Record)*: An array of role descriptors for this API key. This parameter is optional. When it is not specified or is an empty array, then the API key will have a point in time snapshot of permissions of the authenticated user. 
If you supply role descriptors then the resultant permissions would be an intersection of API keys permissions and authenticated user’s permissions thereby limiting the access scope for API keys. The structure of role descriptor is the same as the request for create role API. For more details, see create or update roles API. ** *`metadata` (Optional, Record)*: Arbitrary metadata that you want to associate with the API key. It supports nested data structure. Within the metadata object, keys beginning with _ are reserved for system usage. ** *`expiration` (Optional, string | -1 | 0)*: Expiration time for the API key. @@ -4946,7 +5415,7 @@ client.sql.query({ ... }) If you specify a cursor, the API only uses the `columnar` and `time_zone` request body parameters. It ignores other request body parameters. ** *`fetch_size` (Optional, number)*: The maximum number of rows (or entries) to return in one response -** *`filter` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Elasticsearch query DSL for additional filtering. 
+** *`filter` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Elasticsearch query DSL for additional filtering. ** *`query` (Optional, string)*: SQL query to run. ** *`request_timeout` (Optional, string | -1 | 0)*: The timeout before the request fails. ** *`page_timeout` (Optional, string | -1 | 0)*: The timeout before a pagination request fails. @@ -4977,7 +5446,7 @@ client.sql.translate({ query }) * *Request (object):* ** *`query` (string)*: SQL query to run. ** *`fetch_size` (Optional, number)*: The maximum number of rows (or entries) to return in one response. -** *`filter` (Optional, { bool, boosting, common, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule_query, script, script_score, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, term, terms, terms_set, wildcard, wrapper, type })*: Elasticsearch query DSL for additional filtering. 
+** *`filter` (Optional, { bool, boosting, common, combined_fields, constant_score, dis_max, distance_feature, exists, function_score, fuzzy, geo_bounding_box, geo_distance, geo_polygon, geo_shape, has_child, has_parent, ids, intervals, knn, match, match_all, match_bool_prefix, match_none, match_phrase, match_phrase_prefix, more_like_this, multi_match, nested, parent_id, percolate, pinned, prefix, query_string, range, rank_feature, regexp, rule, script, script_score, semantic, shape, simple_query_string, span_containing, span_field_masking, span_first, span_multi, span_near, span_not, span_or, span_term, span_within, sparse_vector, term, terms, terms_set, text_expansion, weighted_tokens, wildcard, wrapper, type })*: Elasticsearch query DSL for additional filtering. ** *`time_zone` (Optional, string)*: ISO-8601 time zone ID for the search. [discrete] @@ -5069,7 +5538,7 @@ client.synonyms.getSynonymsSets({ ... }) [discrete] ==== put_synonym -Creates or updates a synonyms set +Creates or updates a synonym set. {ref}/put-synonyms-set.html[Endpoint documentation] [source,ts] @@ -5106,7 +5575,8 @@ client.synonyms.putSynonymRule({ set_id, rule_id, synonyms }) === tasks [discrete] ==== get -Returns information about a task. +Get task information. +Returns information about the tasks currently executing in the cluster. {ref}/tasks.html[Endpoint documentation] [source,ts] @@ -5127,7 +5597,8 @@ If no response is received before the timeout expires, the request fails and ret === transform [discrete] ==== delete_transform -Deletes an existing transform. +Delete a transform. +Deletes a transform. {ref}/delete-transform.html[Endpoint documentation] [source,ts] @@ -5148,6 +5619,7 @@ index will not be deleted [discrete] ==== get_transform +Get transforms. Retrieves configuration information for transforms. {ref}/get-transform.html[Endpoint documentation] @@ -5180,6 +5652,7 @@ be retrieved and then added to another cluster. [discrete] ==== get_transform_stats +Get transform stats. 
Retrieves usage information for transforms. {ref}/get-transform-stats.html[Endpoint documentation] @@ -5210,7 +5683,12 @@ there are no matches or only partial matches. [discrete] ==== preview_transform -Previews a transform. +Preview a transform. +Generates a preview of the results that you will get when you create a transform with the same configuration. + +It returns a maximum of 100 results. The calculations are based on all the current data in the source index. It also +generates a list of mappings and settings for the destination index. These values are determined based on the field +types of the source index and the transform aggregations. {ref}/preview-transform.html[Endpoint documentation] [source,ts] @@ -5234,7 +5712,7 @@ indexing. The minimum value is 1s and the maximum is 1h. These objects define the group by fields and the aggregation to reduce the data. ** *`source` (Optional, { index, query, remote, size, slice, sort, _source, runtime_mappings })*: The source of the data for the transform. -** *`settings` (Optional, { align_checkpoints, dates_as_epoch_millis, deduce_mappings, docs_per_second, max_page_search_size })*: Defines optional transform settings. +** *`settings` (Optional, { align_checkpoints, dates_as_epoch_millis, deduce_mappings, docs_per_second, max_page_search_size, unattended })*: Defines optional transform settings. ** *`sync` (Optional, { time })*: Defines the properties transforms require to run continuously. ** *`retention_policy` (Optional, { time })*: Defines a retention policy for the transform. Data that meets the defined criteria is deleted from the destination index. @@ -5245,7 +5723,28 @@ timeout expires, the request fails and returns an error. [discrete] ==== put_transform -Instantiates a transform. +Create a transform. +Creates a transform. + +A transform copies data from source indices, transforms it, and persists it into an entity-centric destination index. 
You can also think of the destination index as a two-dimensional tabular data structure (known as +a data frame). The ID for each document in the data frame is generated from a hash of the entity, so there is a +unique row per entity. + +You must choose either the latest or pivot method for your transform; you cannot use both in a single transform. If +you choose to use the pivot method for your transform, the entities are defined by the set of `group_by` fields in +the pivot object. If you choose to use the latest method, the entities are defined by the `unique_key` field values +in the latest object. + +You must have `create_index`, `index`, and `read` privileges on the destination index and `read` and +`view_index_metadata` privileges on the source indices. When Elasticsearch security features are enabled, the +transform remembers which roles the user that created it had at the time of creation and uses those same roles. If +those roles do not have the required privileges on the source and destination indices, the transform fails when it +attempts unauthorized operations. + +NOTE: You must use Kibana or this API to create a transform. Do not add a transform directly into any +`.transform-internal*` indices using the Elasticsearch index API. If Elasticsearch security features are enabled, do +not give users any privileges on `.transform-internal*` indices. If you used transforms prior to 7.5, also do not +give users any privileges on `.data-frame-internal*` indices. {ref}/put-transform.html[Endpoint documentation] [source,ts] @@ -5271,7 +5770,7 @@ The minimum value is `1s` and the maximum is `1h`. and the aggregation to reduce the data. ** *`retention_policy` (Optional, { time })*: Defines a retention policy for the transform. Data that meets the defined criteria is deleted from the destination index. 
-** *`settings` (Optional, { align_checkpoints, dates_as_epoch_millis, deduce_mappings, docs_per_second, max_page_search_size })*: Defines optional transform settings. +** *`settings` (Optional, { align_checkpoints, dates_as_epoch_millis, deduce_mappings, docs_per_second, max_page_search_size, unattended })*: Defines optional transform settings. ** *`sync` (Optional, { time })*: Defines the properties transforms require to run continuously. ** *`defer_validation` (Optional, boolean)*: When the transform is created, a series of validations occur to ensure its success. For example, there is a check for the existence of the source indices and a check that the destination index is not part of the source @@ -5282,7 +5781,10 @@ the exception of privilege checks. [discrete] ==== reset_transform -Resets an existing transform. +Reset a transform. +Resets a transform. +Before you can reset it, you must stop it; alternatively, use the `force` query parameter. +If the destination index was created by the transform, it is deleted. {ref}/reset-transform.html[Endpoint documentation] [source,ts] @@ -5301,7 +5803,13 @@ must be stopped before it can be reset. [discrete] ==== schedule_now_transform -Schedules now a transform. +Schedule a transform to start now. +Instantly runs a transform to process data. + +If you _schedule_now a transform, it will process the new data instantly, +without waiting for the configured frequency interval. After _schedule_now API is called, +the transform will be processed again at now + frequency unless _schedule_now API +is called again in the meantime. {ref}/schedule-now-transform.html[Endpoint documentation] [source,ts] @@ -5318,7 +5826,23 @@ client.transform.scheduleNowTransform({ transform_id }) [discrete] ==== start_transform -Starts one or more transforms. +Start a transform. +Starts a transform. + +When you start a transform, it creates the destination index if it does not already exist. 
The `number_of_shards` is +set to `1` and the `auto_expand_replicas` is set to `0-1`. If it is a pivot transform, it deduces the mapping +definitions for the destination index from the source indices and the transform aggregations. If fields in the +destination index are derived from scripts (as in the case of `scripted_metric` or `bucket_script` aggregations), +the transform uses dynamic mappings unless an index template exists. If it is a latest transform, it does not deduce +mapping definitions; it uses dynamic mappings. To use explicit mappings, create the destination index before you +start the transform. Alternatively, you can create an index template, though it does not affect the deduced mappings +in a pivot transform. + +When the transform starts, a series of validations occur to ensure its success. If you deferred validation when you +created the transform, they occur when you start the transform—​with the exception of privilege checks. When +Elasticsearch security features are enabled, the transform remembers which roles the user that created it had at the +time of creation and uses those same roles. If those roles do not have the required privileges on the source and +destination indices, the transform fails when it attempts unauthorized operations. {ref}/start-transform.html[Endpoint documentation] [source,ts] @@ -5336,6 +5860,7 @@ client.transform.startTransform({ transform_id }) [discrete] ==== stop_transform +Stop transforms. Stops one or more transforms. {ref}/stop-transform.html[Endpoint documentation] @@ -5369,8 +5894,15 @@ immediately and the indexer is stopped asynchronously in the background. [discrete] ==== update_transform +Update a transform. Updates certain properties of a transform. +All updated properties except `description` do not take effect until after the transform starts the next checkpoint, +thus there is data consistency in each checkpoint. 
To use this API, you must have `read` and `view_index_metadata` +privileges for the source indices. You must also have `index` and `read` privileges for the destination index. When +Elasticsearch security features are enabled, the transform remembers which roles the user who updated it had at the +time of update and runs with those privileges. + {ref}/update-transform.html[Endpoint documentation] [source,ts] ---- @@ -5390,7 +5922,7 @@ the event of transient failures while the transform is searching or indexing. The minimum value is 1s and the maximum is 1h. ** *`_meta` (Optional, Record)*: Defines optional transform metadata. ** *`source` (Optional, { index, query, remote, size, slice, sort, _source, runtime_mappings })*: The source of the data for the transform. -** *`settings` (Optional, { align_checkpoints, dates_as_epoch_millis, deduce_mappings, docs_per_second, max_page_search_size })*: Defines optional transform settings. +** *`settings` (Optional, { align_checkpoints, dates_as_epoch_millis, deduce_mappings, docs_per_second, max_page_search_size, unattended })*: Defines optional transform settings. ** *`sync` (Optional, { time })*: Defines the properties transforms require to run continuously. ** *`retention_policy` (Optional, { time } | null)*: Defines a retention policy for the transform. Data that meets the defined criteria is deleted from the destination index. diff --git a/src/api/api/async_search.ts b/src/api/api/async_search.ts index 9a51b2d..4a8cef9 100644 --- a/src/api/api/async_search.ts +++ b/src/api/api/async_search.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class AsyncSearch { } /** - * Deletes an async search by ID. If the search is still running, the search request will be cancelled. Otherwise, the saved search results are deleted. + * Deletes an async search by identifier. 
If the search is still running, the search request will be cancelled. Otherwise, the saved search results are deleted. If the Elasticsearch security features are enabled, the deletion of a specific async search is restricted to: the authenticated user that submitted the original search request; users that have the `cancel_task` cluster privilege. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/async-search.html | Elasticsearch API documentation} */ async delete (this: That, params: T.AsyncSearchDeleteRequest | TB.AsyncSearchDeleteRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -66,11 +67,17 @@ export default class AsyncSearch { const method = 'DELETE' const path = `/_async_search/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'async_search.delete', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves the results of a previously submitted async search request given its ID. + * Retrieves the results of a previously submitted async search request given its identifier. If the Elasticsearch security features are enabled, access to the results of a specific async search is restricted to the user or API key that submitted it. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/async-search.html | Elasticsearch API documentation} */ async get> (this: That, params: T.AsyncSearchGetRequest | TB.AsyncSearchGetRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -92,11 +99,17 @@ export default class AsyncSearch { const method = 'GET' const path = `/_async_search/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'async_search.get', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves the status of a previously submitted async search request given its ID. + * Get async search status Retrieves the status of a previously submitted async search request given its identifier, without retrieving search results. If the Elasticsearch security features are enabled, use of this API is restricted to the `monitoring_user` role. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/async-search.html | Elasticsearch API documentation} */ async status (this: That, params: T.AsyncSearchStatusRequest | TB.AsyncSearchStatusRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -118,11 +131,17 @@ export default class AsyncSearch { const method = 'GET' const path = `/_async_search/status/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'async_search.status', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Executes a search request asynchronously. + * Runs a search request asynchronously. 
When the primary sort of the results is an indexed field, shards get sorted based on minimum and maximum value that they hold for that field, hence partial results become available following the sort criteria that was requested. Warning: Async search does not support scroll nor search requests that only include the suggest section. By default, Elasticsearch doesn’t allow you to store an async search response larger than 10Mb and an attempt to do this results in an error. The maximum allowed size for a stored async search response can be set by changing the `search.max_async_search_response_size` cluster level setting. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/async-search.html | Elasticsearch API documentation} */ async submit> (this: That, params?: T.AsyncSearchSubmitRequest | TB.AsyncSearchSubmitRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -130,7 +149,7 @@ export default class AsyncSearch { async submit> (this: That, params?: T.AsyncSearchSubmitRequest | TB.AsyncSearchSubmitRequest, options?: TransportRequestOptions): Promise> async submit> (this: That, params?: T.AsyncSearchSubmitRequest | TB.AsyncSearchSubmitRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = ['index'] - const acceptedBody: string[] = ['aggregations', 'aggs', 'collapse', 'explain', 'ext', 'from', 'highlight', 'track_total_hits', 'indices_boost', 'docvalue_fields', 'min_score', 'post_filter', 'profile', 'query', 'rescore', 'script_fields', 'search_after', 'size', 'slice', 'sort', '_source', 'fields', 'suggest', 'terminate_after', 'timeout', 'track_scores', 'version', 'seq_no_primary_term', 'stored_fields', 'pit', 'runtime_mappings', 'stats'] + const acceptedBody: string[] = ['aggregations', 'aggs', 'collapse', 'explain', 'ext', 'from', 'highlight', 'track_total_hits', 'indices_boost', 'docvalue_fields', 'knn', 'min_score', 'post_filter', 'profile', 'query', 'rescore', 'script_fields', 'search_after', 
'size', 'slice', 'sort', '_source', 'fields', 'suggest', 'terminate_after', 'timeout', 'track_scores', 'version', 'seq_no_primary_term', 'stored_fields', 'pit', 'runtime_mappings', 'stats'] const querystring: Record = {} // @ts-expect-error const userBody: any = params?.body @@ -170,6 +189,12 @@ export default class AsyncSearch { method = 'POST' path = '/_async_search' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'async_search.submit', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/bulk.ts b/src/api/api/bulk.ts index f84ea17..b7a5dfa 100644 --- a/src/api/api/bulk.ts +++ b/src/api/api/bulk.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Allows to perform multiple index/update/delete operations in a single request. + * Bulk index or delete documents. Performs multiple indexing or delete operations in a single API call. This reduces overhead and can greatly increase indexing speed. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-bulk.html | Elasticsearch API documentation} */ export default async function BulkApi (this: That, params: T.BulkRequest | TB.BulkRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -72,5 +73,11 @@ export default async function BulkApi info > CAT APIs are only intended for human consumption using the command line or the Kibana console. They are not intended for use by applications. For application consumption, use [the /_alias endpoints](#endpoint-alias). 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-alias.html | Elasticsearch API documentation} */ async aliases (this: That, params?: T.CatAliasesRequest | TB.CatAliasesRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -74,11 +75,17 @@ export default class Cat { method = 'GET' path = '/_cat/aliases' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cat.aliases', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns information about existing component_templates templates. + * Get component templates. Returns information about component templates in a cluster. Component templates are building blocks for constructing index templates that specify index mappings, settings, and aliases. > info > CAT APIs are only intended for human consumption using the command line or Kibana console. They are not intended for use by applications. For application consumption, use [the /_component_template endpoints](#endpoint-component-template). * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-component-templates.html | Elasticsearch API documentation} */ async componentTemplates (this: That, params?: T.CatComponentTemplatesRequest | TB.CatComponentTemplatesRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -108,11 +115,17 @@ export default class Cat { method = 'GET' path = '/_cat/component_templates' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cat.component_templates', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Provides quick access to the document count of the entire cluster, or individual indices. 
+ * Get a document count. Provides quick access to a document count for a data stream, an index, or an entire cluster. The document count only includes live documents, not deleted documents which have not yet been removed by the merge process. > info > CAT APIs are only intended for human consumption using the command line or Kibana console. They are not intended for use by applications. For application consumption, use [the /_count endpoints](#endpoint-count). * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-count.html | Elasticsearch API documentation} */ async count (this: That, params?: T.CatCountRequest | TB.CatCountRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -142,11 +155,17 @@ export default class Cat { method = 'GET' path = '/_cat/count' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cat.count', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns help for the Cat APIs. + * Get CAT help. Returns help for the CAT APIs. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/cat.html | Elasticsearch API documentation} */ async help (this: That, params?: T.CatHelpRequest | TB.CatHelpRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -169,11 +188,14 @@ export default class Cat { const method = 'GET' const path = '/_cat' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cat.help' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns information about indices: number of primaries and replicas, document counts, disk size, ... + * Get index information. 
Returns high-level information about indices in a cluster, including backing indices for data streams. > info > CAT APIs are only intended for human consumption using the command line or Kibana console. They are not intended for use by applications. For application consumption, use an index endpoint. Use this request to get the following information for each index in a cluster: - shard count - document count - deleted document count - primary store size - total store size of all shards, including shard replicas These metrics are retrieved directly from Lucene, which Elasticsearch uses internally to power indexing and search. As a result, all document counts include hidden nested documents. To get an accurate count of Elasticsearch documents, use the [/_cat/count](#operation-cat-count) or [count](#endpoint-count) endpoints. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-indices.html | Elasticsearch API documentation} */ async indices (this: That, params?: T.CatIndicesRequest | TB.CatIndicesRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -203,11 +225,17 @@ export default class Cat { method = 'GET' path = '/_cat/indices' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cat.indices', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Gets configuration and usage information about data frame analytics jobs. + * Get data frame analytics jobs. Returns configuration and usage information about data frame analytics jobs. > info > CAT APIs are only intended for human consumption using the Kibana console or command line. They are not intended for use by applications. For application consumption, use [the /_ml/data_frame/analytics endpoints](#endpoint-ml). 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-dfanalytics.html | Elasticsearch API documentation} */ async mlDataFrameAnalytics (this: That, params?: T.CatMlDataFrameAnalyticsRequest | TB.CatMlDataFrameAnalyticsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -237,11 +265,17 @@ export default class Cat { method = 'GET' path = '/_cat/ml/data_frame/analytics' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cat.ml_data_frame_analytics', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Gets configuration and usage information about datafeeds. + * Get datafeeds. Returns configuration and usage information about datafeeds. This API returns a maximum of 10,000 datafeeds. If the Elasticsearch security features are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. > info > CAT APIs are only intended for human consumption using the Kibana console or command line. They are not intended for use by applications. For application consumption, use [the /_ml/datafeeds endpoints](#endpoint-ml). 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-datafeeds.html | Elasticsearch API documentation} */ async mlDatafeeds (this: That, params?: T.CatMlDatafeedsRequest | TB.CatMlDatafeedsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -271,11 +305,17 @@ export default class Cat { method = 'GET' path = '/_cat/ml/datafeeds' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cat.ml_datafeeds', + pathParts: { + datafeed_id: params.datafeed_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Gets configuration and usage information about anomaly detection jobs. + * Get anomaly detection jobs. Returns configuration and usage information for anomaly detection jobs. This API returns a maximum of 10,000 jobs. If the Elasticsearch security features are enabled, you must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. > info > CAT APIs are only intended for human consumption using the Kibana console or command line. They are not intended for use by applications. For application consumption, use [the /_ml/anomaly_detectors endpoints](#endpoint-ml). 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-anomaly-detectors.html | Elasticsearch API documentation} */ async mlJobs (this: That, params?: T.CatMlJobsRequest | TB.CatMlJobsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -305,11 +345,17 @@ export default class Cat { method = 'GET' path = '/_cat/ml/anomaly_detectors' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cat.ml_jobs', + pathParts: { + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Gets configuration and usage information about inference trained models. + * Get trained models. Returns configuration and usage information about inference trained models. > info > CAT APIs are only intended for human consumption using the Kibana console or command line. They are not intended for use by applications. For application consumption, use [the /_ml/trained_models endpoints](#endpoint-ml). * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-trained-model.html | Elasticsearch API documentation} */ async mlTrainedModels (this: That, params?: T.CatMlTrainedModelsRequest | TB.CatMlTrainedModelsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -339,11 +385,17 @@ export default class Cat { method = 'GET' path = '/_cat/ml/trained_models' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cat.ml_trained_models', + pathParts: { + model_id: params.model_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Gets configuration and usage information about transforms. + * Get transforms. Returns configuration and usage information about transforms. 
> info > CAT APIs are only intended for human consumption using the Kibana console or command line. They are not intended for use by applications. For application consumption, use [the /_transform endpoints](#endpoint-transform). * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/cat-transforms.html | Elasticsearch API documentation} */ async transforms (this: That, params?: T.CatTransformsRequest | TB.CatTransformsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -373,6 +425,12 @@ export default class Cat { method = 'GET' path = '/_cat/transforms' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cat.transforms', + pathParts: { + transform_id: params.transform_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/clear_scroll.ts b/src/api/api/clear_scroll.ts index 99eae72..7c4848d 100644 --- a/src/api/api/clear_scroll.ts +++ b/src/api/api/clear_scroll.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Explicitly clears the search context for a scroll. + * Clears the search context and results for a scrolling search. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/clear-scroll-api.html | Elasticsearch API documentation} */ export default async function ClearScrollApi (this: That, params?: T.ClearScrollRequest | TB.ClearScrollRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -73,5 +74,11 @@ export default async function ClearScrollApi (this: That, params?: T.ClearScroll const method = 'DELETE' const path = '/_search/scroll' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'clear_scroll', + pathParts: { + scroll_id: params.scroll_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/close_point_in_time.ts b/src/api/api/close_point_in_time.ts index 3806890..c4c779e 100644 --- a/src/api/api/close_point_in_time.ts +++ b/src/api/api/close_point_in_time.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Close a point in time + * Closes a point-in-time. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/point-in-time-api.html | Elasticsearch API documentation} */ export default async function ClosePointInTimeApi (this: That, params: T.ClosePointInTimeRequest | TB.ClosePointInTimeRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -72,5 +73,8 @@ export default async function ClosePointInTimeApi (this: That, params: T.ClosePo const method = 'DELETE' const path = '/_pit' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'close_point_in_time' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/cluster.ts b/src/api/api/cluster.ts index 40a69a8..7602714 100644 --- a/src/api/api/cluster.ts +++ b/src/api/api/cluster.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class Cluster { } /** - * Deletes a component template + * Delete component templates. Deletes component templates. Component templates are building blocks for constructing index templates that specify index mappings, settings, and aliases. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html | Elasticsearch API documentation} */ async deleteComponentTemplate (this: That, params: T.ClusterDeleteComponentTemplateRequest | TB.ClusterDeleteComponentTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -66,11 +67,17 @@ export default class Cluster { const method = 'DELETE' const path = `/_component_template/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cluster.delete_component_template', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns information about whether a particular component template exist + * Check component templates. Returns information about whether a particular component template exists. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html | Elasticsearch API documentation} */ async existsComponentTemplate (this: That, params: T.ClusterExistsComponentTemplateRequest | TB.ClusterExistsComponentTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -92,11 +99,17 @@ export default class Cluster { const method = 'HEAD' const path = `/_component_template/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cluster.exists_component_template', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns one or more component templates + * Get component templates. Retrieves information about component templates. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html | Elasticsearch API documentation} */ async getComponentTemplate (this: That, params?: T.ClusterGetComponentTemplateRequest | TB.ClusterGetComponentTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -126,11 +139,17 @@ export default class Cluster { method = 'GET' path = '/_component_template' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cluster.get_component_template', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns different information about the cluster. + * Get cluster info. Returns basic information about the cluster. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-info.html | Elasticsearch API documentation} */ async info (this: That, params: T.ClusterInfoRequest | TB.ClusterInfoRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -152,11 +171,17 @@ export default class Cluster { const method = 'GET' const path = `/_info/${encodeURIComponent(params.target.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cluster.info', + pathParts: { + target: params.target + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates or updates a component template + * Create or update a component template. Creates or updates a component template. Component templates are building blocks for constructing index templates that specify index mappings, settings, and aliases. An index template can be composed of multiple component templates. To use a component template, specify it in an index template’s `composed_of` list. 
Component templates are only applied to new data streams and indices as part of a matching index template. Settings and mappings specified directly in the index template or the create index request override any settings or mappings specified in a component template. Component templates are only used during index creation. For data streams, this includes data stream creation and the creation of a stream’s backing indices. Changes to component templates do not affect existing indices, including a stream’s backing indices. You can use C-style `/* *\/` block comments in component templates. You can include comments anywhere in the request body except before the opening curly bracket. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-component-template.html | Elasticsearch API documentation} */ async putComponentTemplate (this: That, params: T.ClusterPutComponentTemplateRequest | TB.ClusterPutComponentTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -190,6 +215,12 @@ export default class Cluster { const method = 'PUT' const path = `/_component_template/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'cluster.put_component_template', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/count.ts b/src/api/api/count.ts index 0d01dbb..32e09ad 100644 --- a/src/api/api/count.ts +++ b/src/api/api/count.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -80,5 +81,11 @@ export default async function CountApi (this: That, params?: T.CountRequest | TB method = body != null ? 
'POST' : 'GET' path = '/_count' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'count', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/create.ts b/src/api/api/create.ts index 622f8eb..f130eb4 100644 --- a/src/api/api/create.ts +++ b/src/api/api/create.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Creates a new document in the index. Returns a 409 response when a document with a same ID already exists in the index. + * Index a document. Adds a JSON document to the specified data stream or index and makes it searchable. If the target is an index and the document already exists, the request updates the document and increments its version. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-index_.html | Elasticsearch API documentation} */ export default async function CreateApi (this: That, params: T.CreateRequest | TB.CreateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -65,5 +66,12 @@ export default async function CreateApi (this: That, params const method = 'PUT' const path = `/${encodeURIComponent(params.index.toString())}/_create/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'create', + pathParts: { + id: params.id, + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/delete.ts b/src/api/api/delete.ts index f834f72..387a223 100644 --- a/src/api/api/delete.ts +++ b/src/api/api/delete.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Removes a document from the index. + * Delete a document. Removes a JSON document from the specified index. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete.html | Elasticsearch API documentation} */ export default async function DeleteApi (this: That, params: T.DeleteRequest | TB.DeleteRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -60,5 +61,12 @@ export default async function DeleteApi (this: That, params: T.DeleteRequest | T const method = 'DELETE' const path = `/${encodeURIComponent(params.index.toString())}/_doc/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'delete', + pathParts: { + id: params.id, + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/delete_by_query.ts b/src/api/api/delete_by_query.ts index fc62679..68776b0 100644 --- a/src/api/api/delete_by_query.ts +++ b/src/api/api/delete_by_query.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Deletes documents matching the provided query. + * Delete documents. Deletes documents that match the specified query. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-delete-by-query.html | Elasticsearch API documentation} */ export default async function DeleteByQueryApi (this: That, params: T.DeleteByQueryRequest | TB.DeleteByQueryRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -72,5 +73,11 @@ export default async function DeleteByQueryApi (this: That, params: T.DeleteByQu const method = 'POST' const path = `/${encodeURIComponent(params.index.toString())}/_delete_by_query` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'delete_by_query', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/delete_script.ts b/src/api/api/delete_script.ts index 5c84921..801d4aa 100644 --- a/src/api/api/delete_script.ts +++ b/src/api/api/delete_script.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Deletes a script. + * Delete a script or search template. Deletes a stored script or search template. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html | Elasticsearch API documentation} */ export default async function DeleteScriptApi (this: That, params: T.DeleteScriptRequest | TB.DeleteScriptRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -60,5 +61,11 @@ export default async function DeleteScriptApi (this: That, params: T.DeleteScrip const method = 'DELETE' const path = `/_scripts/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'delete_script', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/enrich.ts b/src/api/api/enrich.ts index f2f0f68..ada26a2 100644 --- a/src/api/api/enrich.ts +++ b/src/api/api/enrich.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class Enrich { } /** - * Deletes an existing enrich policy and its enrich index. + * Delete an enrich policy. Deletes an existing enrich policy and its enrich index. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-enrich-policy-api.html | Elasticsearch API documentation} */ async deletePolicy (this: That, params: T.EnrichDeletePolicyRequest | TB.EnrichDeletePolicyRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -66,7 +67,13 @@ export default class Enrich { const method = 'DELETE' const path = `/_enrich/policy/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'enrich.delete_policy', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -92,11 +99,17 @@ export default class Enrich { const method = 'PUT' const path = `/_enrich/policy/${encodeURIComponent(params.name.toString())}/_execute` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'enrich.execute_policy', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Gets information about an enrich policy. + * Get an enrich policy. Returns information about an enrich policy. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/get-enrich-policy-api.html | Elasticsearch API documentation} */ async getPolicy (this: That, params?: T.EnrichGetPolicyRequest | TB.EnrichGetPolicyRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -126,11 +139,17 @@ export default class Enrich { method = 'GET' path = '/_enrich/policy' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'enrich.get_policy', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates a new enrich policy. + * Create an enrich policy. Creates an enrich policy. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/put-enrich-policy-api.html | Elasticsearch API documentation} */ async putPolicy (this: That, params: T.EnrichPutPolicyRequest | TB.EnrichPutPolicyRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -164,11 +183,17 @@ export default class Enrich { const method = 'PUT' const path = `/_enrich/policy/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'enrich.put_policy', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Gets enrich coordinator statistics and information about enrich policies that are currently executing. + * Get enrich stats. Returns enrich coordinator statistics and information about enrich policies that are currently executing. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/enrich-stats-api.html | Elasticsearch API documentation} */ async stats (this: That, params?: T.EnrichStatsRequest | TB.EnrichStatsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -191,6 +216,9 @@ export default class Enrich { const method = 'GET' const path = '/_enrich/_stats' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'enrich.stats' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/eql.ts b/src/api/api/eql.ts index 0bec401..0e0ddd8 100644 --- a/src/api/api/eql.ts +++ b/src/api/api/eql.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class Eql { } /** - * Deletes an async EQL search by ID. If the search is still running, the search request will be cancelled. Otherwise, the saved search results are deleted. + * Deletes an async EQL search or a stored synchronous EQL search. The API also deletes results for the search. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/eql-search-api.html | Elasticsearch API documentation} */ async delete (this: That, params: T.EqlDeleteRequest | TB.EqlDeleteRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -66,11 +67,17 @@ export default class Eql { const method = 'DELETE' const path = `/_eql/search/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'eql.delete', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns async results from previously executed Event Query Language (EQL) search + * Returns the current status and available results for an async EQL search or a stored synchronous EQL search. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/get-async-eql-search-api.html | Elasticsearch API documentation} */ async get (this: That, params: T.EqlGetRequest | TB.EqlGetRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -92,11 +99,17 @@ export default class Eql { const method = 'GET' const path = `/_eql/search/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'eql.get', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns the status of a previously submitted async or stored Event Query Language (EQL) search + * Returns the current status for an async EQL search or a stored synchronous EQL search without returning results. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/get-async-eql-status-api.html | Elasticsearch API documentation} */ async getStatus (this: That, params: T.EqlGetStatusRequest | TB.EqlGetStatusRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -118,7 +131,13 @@ export default class Eql { const method = 'GET' const path = `/_eql/search/status/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'eql.get_status', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -130,7 +149,7 @@ export default class Eql { async search (this: That, params: T.EqlSearchRequest | TB.EqlSearchRequest, options?: TransportRequestOptions): Promise> async search (this: That, params: T.EqlSearchRequest | TB.EqlSearchRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = ['index'] - const acceptedBody: string[] = ['query', 'case_sensitive', 'event_category_field', 'tiebreaker_field', 'timestamp_field', 'fetch_size', 'filter', 'keep_alive', 'keep_on_completion', 'wait_for_completion_timeout', 'size', 'fields', 'result_position'] + const acceptedBody: string[] = ['query', 'case_sensitive', 'event_category_field', 'tiebreaker_field', 'timestamp_field', 'fetch_size', 'filter', 'keep_alive', 'keep_on_completion', 'wait_for_completion_timeout', 'size', 'fields', 'result_position', 'runtime_mappings'] const querystring: Record = {} // @ts-expect-error const userBody: any = params?.body @@ -156,6 +175,12 @@ export default class Eql { const method = body != null ? 
'POST' : 'GET' const path = `/${encodeURIComponent(params.index.toString())}/_eql/search` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'eql.search', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/esql.ts b/src/api/api/esql.ts index 4d211a1..7f8425d 100644 --- a/src/api/api/esql.ts +++ b/src/api/api/esql.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class Esql { } /** - * Executes an ESQL request + * Executes an ES|QL request * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/esql-rest.html | Elasticsearch API documentation} */ async query (this: That, params: T.EsqlQueryRequest | TB.EsqlQueryRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -52,7 +53,7 @@ export default class Esql { async query (this: That, params: T.EsqlQueryRequest | TB.EsqlQueryRequest, options?: TransportRequestOptions): Promise async query (this: That, params: T.EsqlQueryRequest | TB.EsqlQueryRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = [] - const acceptedBody: string[] = ['columnar', 'filter', 'locale', 'params', 'query'] + const acceptedBody: string[] = ['columnar', 'filter', 'locale', 'params', 'profile', 'query', 'tables'] const querystring: Record = {} // @ts-expect-error const userBody: any = params?.body @@ -78,6 +79,9 @@ export default class Esql { const method = 'POST' const path = '/_query' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'esql.query' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git 
a/src/api/api/exists.ts b/src/api/api/exists.ts index a0448c0..8f5033e 100644 --- a/src/api/api/exists.ts +++ b/src/api/api/exists.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Returns information about whether a document exists in an index. + * Check a document. Checks if a specified document exists. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html | Elasticsearch API documentation} */ export default async function ExistsApi (this: That, params: T.ExistsRequest | TB.ExistsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -60,5 +61,12 @@ export default async function ExistsApi (this: That, params: T.ExistsRequest | T const method = 'HEAD' const path = `/${encodeURIComponent(params.index.toString())}/_doc/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'exists', + pathParts: { + id: params.id, + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/exists_source.ts b/src/api/api/exists_source.ts index ba264a3..8c6f144 100644 --- a/src/api/api/exists_source.ts +++ b/src/api/api/exists_source.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Returns information about whether a document source exists in an index. + * Check for a document source. Checks if a document's `_source` is stored. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html | Elasticsearch API documentation} */ export default async function ExistsSourceApi (this: That, params: T.ExistsSourceRequest | TB.ExistsSourceRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -60,5 +61,12 @@ export default async function ExistsSourceApi (this: That, params: T.ExistsSourc const method = 'HEAD' const path = `/${encodeURIComponent(params.index.toString())}/_source/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'exists_source', + pathParts: { + id: params.id, + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/explain.ts b/src/api/api/explain.ts index 6e910ff..a65f6dc 100644 --- a/src/api/api/explain.ts +++ b/src/api/api/explain.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Returns information about why a specific matches (or doesn't match) a query. + * Explain a document match result. Returns information about why a specific document matches, or doesn’t match, a query. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/search-explain.html | Elasticsearch API documentation} */ export default async function ExplainApi (this: That, params: T.ExplainRequest | TB.ExplainRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -72,5 +73,12 @@ export default async function ExplainApi (this: That, param const method = body != null ? 
'POST' : 'GET' const path = `/${encodeURIComponent(params.index.toString())}/_explain/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'explain', + pathParts: { + id: params.id, + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/field_caps.ts b/src/api/api/field_caps.ts index 94ff25d..7a2c5bc 100644 --- a/src/api/api/field_caps.ts +++ b/src/api/api/field_caps.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Returns the information about the capabilities of fields among multiple indices. + * The field capabilities API returns the information about the capabilities of fields among multiple indices. The field capabilities API returns runtime fields like any other field. For example, a runtime field with a type of keyword is returned as any other field that belongs to the `keyword` family. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/search-field-caps.html | Elasticsearch API documentation} */ export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -46,7 +47,7 @@ export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequ export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptions): Promise export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequest | TB.FieldCapsRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = ['index'] - const acceptedBody: string[] = ['index_filter'] + const acceptedBody: string[] = ['fields', 'index_filter', 'runtime_mappings'] const querystring: Record = {} // @ts-expect-error const userBody: any = params?.body @@ -80,5 +81,11 @@ export default async function FieldCapsApi (this: That, params?: T.FieldCapsRequ method = body != null ? 'POST' : 'GET' path = '/_field_caps' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'field_caps', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/get.ts b/src/api/api/get.ts index 96f3125..3a64e8f 100644 --- a/src/api/api/get.ts +++ b/src/api/api/get.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Returns a document. + * Get a document by its ID. Retrieves the document with the specified ID from an index. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html | Elasticsearch API documentation} */ export default async function GetApi (this: That, params: T.GetRequest | TB.GetRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -60,5 +61,12 @@ export default async function GetApi (this: That, params: T const method = 'GET' const path = `/${encodeURIComponent(params.index.toString())}/_doc/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'get', + pathParts: { + id: params.id, + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/get_script.ts b/src/api/api/get_script.ts index b2c4a03..e84a69d 100644 --- a/src/api/api/get_script.ts +++ b/src/api/api/get_script.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Returns a script. + * Get a script or search template. Retrieves a stored script or search template. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html | Elasticsearch API documentation} */ export default async function GetScriptApi (this: That, params: T.GetScriptRequest | TB.GetScriptRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -60,5 +61,11 @@ export default async function GetScriptApi (this: That, params: T.GetScriptReque const method = 'GET' const path = `/_scripts/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'get_script', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/get_source.ts b/src/api/api/get_source.ts index 7f92d9b..79abeda 100644 --- a/src/api/api/get_source.ts +++ b/src/api/api/get_source.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Returns the source of a document. + * Get a document's source. Returns the source of a document. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-get.html | Elasticsearch API documentation} */ export default async function GetSourceApi (this: That, params: T.GetSourceRequest | TB.GetSourceRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -60,5 +61,12 @@ export default async function GetSourceApi (this: That, par const method = 'GET' const path = `/${encodeURIComponent(params.index.toString())}/_source/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'get_source', + pathParts: { + id: params.id, + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/graph.ts b/src/api/api/graph.ts index 666a409..01d14aa 100644 --- a/src/api/api/graph.ts +++ b/src/api/api/graph.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class Graph { } /** - * Explore extracted and summarized information about the documents and terms in an index. + * Extracts and summarizes information about the documents and terms in an Elasticsearch data stream or index. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/graph-explore-api.html | Elasticsearch API documentation} */ async explore (this: That, params: T.GraphExploreRequest | TB.GraphExploreRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -78,6 +79,12 @@ export default class Graph { const method = body != null ? 
'POST' : 'GET' const path = `/${encodeURIComponent(params.index.toString())}/_graph/explore` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'graph.explore', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/index.ts b/src/api/api/index.ts index b156d47..89fba41 100644 --- a/src/api/api/index.ts +++ b/src/api/api/index.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Creates or updates a document in an index. + * Index a document. Adds a JSON document to the specified data stream or index and makes it searchable. If the target is an index and the document already exists, the request updates the document and increments its version. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-index_.html | Elasticsearch API documentation} */ export default async function IndexApi (this: That, params: T.IndexRequest | TB.IndexRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -72,5 +73,12 @@ export default async function IndexApi (this: That, params: method = 'POST' path = `/${encodeURIComponent(params.index.toString())}/_doc` } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'index', + pathParts: { + id: params.id, + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/indices.ts b/src/api/api/indices.ts index 2acb036..0ff490d 100644 --- a/src/api/api/indices.ts +++ b/src/api/api/indices.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class Indices { } /** - * Adds a block to an index. + * Add an index block. Limits the operations allowed on an index by blocking specific operation types. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/index-modules-blocks.html | Elasticsearch API documentation} */ async addBlock (this: That, params: T.IndicesAddBlockRequest | TB.IndicesAddBlockRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -66,11 +67,18 @@ export default class Indices { const method = 'PUT' const path = `/${encodeURIComponent(params.index.toString())}/_block/${encodeURIComponent(params.block.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.add_block', + pathParts: { + index: params.index, + block: params.block + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Performs the analysis process on a text and return the tokens breakdown of the text. + * Performs analysis on a text string and returns the resulting tokens. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-analyze.html | Elasticsearch API documentation} */ async analyze (this: That, params?: T.IndicesAnalyzeRequest | TB.IndicesAnalyzeRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -112,11 +120,17 @@ export default class Indices { method = body != null ? 'POST' : 'GET' path = '/_analyze' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.analyze', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates an index with optional settings and mappings. + * Create an index. Creates a new index. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-create-index.html | Elasticsearch API documentation} */ async create (this: That, params: T.IndicesCreateRequest | TB.IndicesCreateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -150,11 +164,17 @@ export default class Indices { const method = 'PUT' const path = `/${encodeURIComponent(params.index.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.create', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates a data stream + * Create a data stream. Creates a data stream. You must have a matching index template with data stream enabled. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html | Elasticsearch API documentation} */ async createDataStream (this: That, params: T.IndicesCreateDataStreamRequest | TB.IndicesCreateDataStreamRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -176,11 +196,17 @@ export default class Indices { const method = 'PUT' const path = `/_data_stream/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.create_data_stream', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Provides statistics on operations happening in a data stream. + * Get data stream stats. Retrieves statistics for one or more data streams. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html | Elasticsearch API documentation} */ async dataStreamsStats (this: That, params?: T.IndicesDataStreamsStatsRequest | TB.IndicesDataStreamsStatsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -210,11 +236,17 @@ export default class Indices { method = 'GET' path = '/_data_stream/_stats' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.data_streams_stats', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Deletes an index. + * Delete indices. Deletes one or more indices. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-delete-index.html | Elasticsearch API documentation} */ async delete (this: That, params: T.IndicesDeleteRequest | TB.IndicesDeleteRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -236,11 +268,17 @@ export default class Indices { const method = 'DELETE' const path = `/${encodeURIComponent(params.index.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.delete', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Deletes an alias. + * Delete an alias. Removes a data stream or index from an alias. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-aliases.html | Elasticsearch API documentation} */ async deleteAlias (this: That, params: T.IndicesDeleteAliasRequest | TB.IndicesDeleteAliasRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -269,11 +307,18 @@ export default class Indices { method = 'DELETE' path = `/${encodeURIComponent(params.index.toString())}/_aliases/${encodeURIComponent(params.name.toString())}` } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.delete_alias', + pathParts: { + index: params.index, + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Deletes the data stream lifecycle of the selected data streams. + * Delete data stream lifecycles. Removes the data stream lifecycle from a data stream, rendering it not managed by the data stream lifecycle. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-delete-lifecycle.html | Elasticsearch API documentation} */ async deleteDataLifecycle (this: That, params: T.IndicesDeleteDataLifecycleRequest | TB.IndicesDeleteDataLifecycleRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -295,11 +340,17 @@ export default class Indices { const method = 'DELETE' const path = `/_data_stream/${encodeURIComponent(params.name.toString())}/_lifecycle` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.delete_data_lifecycle', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Deletes a data stream. + * Delete data streams. Deletes one or more data streams and their backing indices. 
 * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html | Elasticsearch API documentation} */ async deleteDataStream (this: That, params: T.IndicesDeleteDataStreamRequest | TB.IndicesDeleteDataStreamRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -321,11 +372,17 @@ export default class Indices { const method = 'DELETE' const path = `/_data_stream/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.delete_data_stream', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Deletes an index template. + * Delete an index template. The provided name may contain multiple template names separated by a comma. If multiple template names are specified then there is no wildcard support and the provided names should match completely with existing templates. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-delete-template.html | Elasticsearch API documentation} */ async deleteIndexTemplate (this: That, params: T.IndicesDeleteIndexTemplateRequest | TB.IndicesDeleteIndexTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -347,11 +404,17 @@ export default class Indices { const method = 'DELETE' const path = `/_index_template/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.delete_index_template', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns information about whether a particular index exists. + * Check indices. Checks if one or more indices, index aliases, or data streams exist.
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-exists.html | Elasticsearch API documentation} */ async exists (this: That, params: T.IndicesExistsRequest | TB.IndicesExistsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -373,11 +436,17 @@ export default class Indices { const method = 'HEAD' const path = `/${encodeURIComponent(params.index.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.exists', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns information about whether a particular alias exists. + * Check aliases. Checks if one or more data stream or index aliases exist. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-aliases.html | Elasticsearch API documentation} */ async existsAlias (this: That, params: T.IndicesExistsAliasRequest | TB.IndicesExistsAliasRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -406,7 +475,14 @@ export default class Indices { method = 'HEAD' path = `/_alias/${encodeURIComponent(params.name.toString())}` } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.exists_alias', + pathParts: { + name: params.name, + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -432,11 +508,17 @@ export default class Indices { const method = 'HEAD' const path = `/_index_template/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.exists_index_template', + pathParts: { + name: params.name + } + } + return await this.transport.request({ 
path, method, querystring, body, meta }, options) } /** - * Retrieves information about the index's current data stream lifecycle, such as any potential encountered error, time since creation etc. + * Get the status for a data stream lifecycle. Retrieves information about an index or data stream’s current data stream lifecycle status, such as time since index creation, time since rollover, the lifecycle configuration managing the index, or any errors encountered during lifecycle execution. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-explain-lifecycle.html | Elasticsearch API documentation} */ async explainDataLifecycle (this: That, params: T.IndicesExplainDataLifecycleRequest | TB.IndicesExplainDataLifecycleRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -458,11 +540,17 @@ export default class Indices { const method = 'GET' const path = `/${encodeURIComponent(params.index.toString())}/_lifecycle/explain` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.explain_data_lifecycle', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns information about one or more indices. + * Get index information. Returns information about one or more indices. For data streams, the API returns information about the stream’s backing indices. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-get-index.html | Elasticsearch API documentation} */ async get (this: That, params: T.IndicesGetRequest | TB.IndicesGetRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -484,11 +572,17 @@ export default class Indices { const method = 'GET' const path = `/${encodeURIComponent(params.index.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.get', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns an alias. + * Get aliases. Retrieves information for one or more data stream or index aliases. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-aliases.html | Elasticsearch API documentation} */ async getAlias (this: That, params?: T.IndicesGetAliasRequest | TB.IndicesGetAliasRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -524,11 +618,18 @@ export default class Indices { method = 'GET' path = '/_alias' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.get_alias', + pathParts: { + name: params.name, + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns the data stream lifecycle of the selected data streams. + * Get data stream lifecycles. Retrieves the data stream lifecycle configuration of one or more data streams. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-get-lifecycle.html | Elasticsearch API documentation} */ async getDataLifecycle (this: That, params: T.IndicesGetDataLifecycleRequest | TB.IndicesGetDataLifecycleRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -550,11 +651,17 @@ export default class Indices { const method = 'GET' const path = `/_data_stream/${encodeURIComponent(params.name.toString())}/_lifecycle` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.get_data_lifecycle', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns data streams. + * Get data streams. Retrieves information about one or more data streams. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html | Elasticsearch API documentation} */ async getDataStream (this: That, params?: T.IndicesGetDataStreamRequest | TB.IndicesGetDataStreamRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -584,11 +691,17 @@ export default class Indices { method = 'GET' path = '/_data_stream' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.get_data_stream', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns an index template. + * Get index templates. Returns information about one or more index templates. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-get-template.html | Elasticsearch API documentation} */ async getIndexTemplate (this: That, params?: T.IndicesGetIndexTemplateRequest | TB.IndicesGetIndexTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -618,11 +731,17 @@ export default class Indices { method = 'GET' path = '/_index_template' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.get_index_template', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns mappings for one or more indices. + * Get mapping definitions. Retrieves mapping definitions for one or more indices. For data streams, the API retrieves mappings for the stream’s backing indices. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-get-mapping.html | Elasticsearch API documentation} */ async getMapping (this: That, params?: T.IndicesGetMappingRequest | TB.IndicesGetMappingRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -652,11 +771,17 @@ export default class Indices { method = 'GET' path = '/_mapping' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.get_mapping', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns settings for one or more indices. + * Get index settings. Returns setting information for one or more indices. For data streams, returns setting information for the stream’s backing indices. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-get-settings.html | Elasticsearch API documentation} */ async getSettings (this: That, params?: T.IndicesGetSettingsRequest | TB.IndicesGetSettingsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -692,11 +817,18 @@ export default class Indices { method = 'GET' path = '/_settings' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.get_settings', + pathParts: { + index: params.index, + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Migrates an alias to a data stream + * Convert an index alias to a data stream. Converts an index alias to a data stream. You must have a matching index template that is data stream enabled. The alias must meet the following criteria: The alias must have a write index; All indices for the alias must have a `@timestamp` field mapping of a `date` or `date_nanos` field type; The alias must not have any filters; The alias must not use custom routing. If successful, the request removes the alias and creates a data stream with the same name. The indices for the alias become hidden backing indices for the stream. The write index for the alias becomes the write index for the stream. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html | Elasticsearch API documentation} */ async migrateToDataStream (this: That, params: T.IndicesMigrateToDataStreamRequest | TB.IndicesMigrateToDataStreamRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -718,11 +850,17 @@ export default class Indices { const method = 'POST' const path = `/_data_stream/_migrate/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.migrate_to_data_stream', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Modifies a data stream + * Update data streams. Performs one or more data stream modification actions in a single atomic operation. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams.html | Elasticsearch API documentation} */ async modifyDataStream (this: That, params: T.IndicesModifyDataStreamRequest | TB.IndicesModifyDataStreamRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -756,11 +894,14 @@ export default class Indices { const method = 'POST' const path = '/_data_stream/_modify' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.modify_data_stream' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates or updates an alias. + * Create or update an alias. Adds a data stream or index to an alias. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-aliases.html | Elasticsearch API documentation} */ async putAlias (this: That, params: T.IndicesPutAliasRequest | TB.IndicesPutAliasRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -801,11 +942,18 @@ export default class Indices { method = 'PUT' path = `/${encodeURIComponent(params.index.toString())}/_aliases/${encodeURIComponent(params.name.toString())}` } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.put_alias', + pathParts: { + index: params.index, + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Updates the data stream lifecycle of the selected data streams. + * Update data stream lifecycles. Update the data stream lifecycle of the specified data streams. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/data-streams-put-lifecycle.html | Elasticsearch API documentation} */ async putDataLifecycle (this: That, params: T.IndicesPutDataLifecycleRequest | TB.IndicesPutDataLifecycleRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -839,11 +987,17 @@ export default class Indices { const method = 'PUT' const path = `/_data_stream/${encodeURIComponent(params.name.toString())}/_lifecycle` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.put_data_lifecycle', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates or updates an index template. + * Create or update an index template. Index templates define settings, mappings, and aliases that can be applied automatically to new indices. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-put-template.html | Elasticsearch API documentation} */ async putIndexTemplate (this: That, params: T.IndicesPutIndexTemplateRequest | TB.IndicesPutIndexTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -877,11 +1031,17 @@ export default class Indices { const method = 'PUT' const path = `/_index_template/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.put_index_template', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Updates the index mappings. + * Update field mappings. Adds new fields to an existing data stream or index. You can also use this API to change the search settings of existing fields. For data streams, these changes are applied to all backing indices by default. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-put-mapping.html | Elasticsearch API documentation} */ async putMapping (this: That, params: T.IndicesPutMappingRequest | TB.IndicesPutMappingRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -915,11 +1075,17 @@ export default class Indices { const method = 'PUT' const path = `/${encodeURIComponent(params.index.toString())}/_mapping` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.put_mapping', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Updates the index settings. + * Update index settings. Changes dynamic index settings in real time. For data streams, index setting changes are applied to all backing indices by default. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-update-settings.html | Elasticsearch API documentation} */ async putSettings (this: That, params: T.IndicesPutSettingsRequest | TB.IndicesPutSettingsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -953,11 +1119,17 @@ export default class Indices { method = 'PUT' path = '/_settings' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.put_settings', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates or updates an index template. + * Create or update an index template. Index templates define settings, mappings, and aliases that can be applied automatically to new indices. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-templates-v1.html | Elasticsearch API documentation} */ async putTemplate (this: That, params: T.IndicesPutTemplateRequest | TB.IndicesPutTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -991,11 +1163,17 @@ export default class Indices { const method = 'PUT' const path = `/_template/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.put_template', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Performs the refresh operation in one or more indices. + * Refresh an index. A refresh makes recent operations performed on one or more indices available for search. For data streams, the API runs the refresh operation on the stream’s backing indices. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-refresh.html | Elasticsearch API documentation} */ async refresh (this: That, params?: T.IndicesRefreshRequest | TB.IndicesRefreshRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1025,11 +1203,17 @@ export default class Indices { method = body != null ? 'POST' : 'GET' path = '/_refresh' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.refresh', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns information about any matching indices, aliases, and data streams + * Resolves the specified name(s) and/or index patterns for indices, aliases, and data streams. Multiple patterns and remote clusters are supported. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-resolve-index-api.html | Elasticsearch API documentation} */ async resolveIndex (this: That, params: T.IndicesResolveIndexRequest | TB.IndicesResolveIndexRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1051,11 +1235,17 @@ export default class Indices { const method = 'GET' const path = `/_resolve/index/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.resolve_index', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Updates an alias to point to a new index when the existing index is considered to be too large or too old. + * Roll over to a new index. Creates a new index for a data stream or index alias. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-rollover-index.html | Elasticsearch API documentation} */ async rollover (this: That, params: T.IndicesRolloverRequest | TB.IndicesRolloverRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1096,11 +1286,18 @@ export default class Indices { method = 'POST' path = `/${encodeURIComponent(params.alias.toString())}/_rollover` } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.rollover', + pathParts: { + alias: params.alias, + new_index: params.new_index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Simulate matching the given index name against the index templates in the system + * Simulate an index. Returns the index configuration that would be applied to the specified index from an existing index template. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-simulate-index.html | Elasticsearch API documentation} */ async simulateIndexTemplate (this: That, params: T.IndicesSimulateIndexTemplateRequest | TB.IndicesSimulateIndexTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1122,11 +1319,17 @@ export default class Indices { const method = 'POST' const path = `/_index_template/_simulate_index/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.simulate_index_template', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Simulate resolving the given template name or body + * Simulate an index template. Returns the index configuration that would be applied by a particular index template. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-simulate-template.html | Elasticsearch API documentation} */ async simulateTemplate (this: That, params?: T.IndicesSimulateTemplateRequest | TB.IndicesSimulateTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1168,11 +1371,17 @@ export default class Indices { method = 'POST' path = '/_index_template/_simulate' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.simulate_template', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Updates index aliases. + * Create or update an alias. Adds a data stream or index to an alias. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-aliases.html | Elasticsearch API documentation} */ async updateAliases (this: That, params?: T.IndicesUpdateAliasesRequest | TB.IndicesUpdateAliasesRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1207,11 +1416,14 @@ export default class Indices { const method = 'POST' const path = '/_aliases' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.update_aliases' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Allows a user to validate a potentially expensive query without executing it. + * Validate a query. Validates a query without running it. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/search-validate.html | Elasticsearch API documentation} */ async validateQuery (this: That, params?: T.IndicesValidateQueryRequest | TB.IndicesValidateQueryRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1253,6 +1465,12 @@ export default class Indices { method = body != null ? 'POST' : 'GET' path = '/_validate/query' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'indices.validate_query', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/inference.ts b/src/api/api/inference.ts index 16dc527..ad69cb8 100644 --- a/src/api/api/inference.ts +++ b/src/api/api/inference.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -73,7 +74,14 @@ export default class Inference { method = 'DELETE' path = `/_inference/${encodeURIComponent(params.inference_id.toString())}` } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'inference.delete', + pathParts: { + task_type: params.task_type, + inference_id: params.inference_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -110,11 +118,18 @@ export default class Inference { method = 'GET' path = '/_inference' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'inference.get', + pathParts: { + task_type: params.task_type, + inference_id: params.inference_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Perform inference + * Perform inference 
on the service * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/post-inference-api.html | Elasticsearch API documentation} */ async inference (this: That, params: T.InferenceInferenceRequest | TB.InferenceInferenceRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -155,11 +170,18 @@ export default class Inference { method = 'POST' path = `/_inference/${encodeURIComponent(params.inference_id.toString())}` } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'inference.inference', + pathParts: { + task_type: params.task_type, + inference_id: params.inference_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Configure an inference endpoint for use in the Inference API + * Create an inference endpoint * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/put-inference-api.html | Elasticsearch API documentation} */ async put (this: That, params: T.InferencePutRequest | TB.InferencePutRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -193,6 +215,13 @@ export default class Inference { method = 'PUT' path = `/_inference/${encodeURIComponent(params.inference_id.toString())}` } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'inference.put', + pathParts: { + task_type: params.task_type, + inference_id: params.inference_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/info.ts b/src/api/api/info.ts index 3eca995..83ce767 100644 --- a/src/api/api/info.ts +++ b/src/api/api/info.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from 
'../typesWithBodyKey' interface That { transport: Transport } /** - * Returns basic information about the cluster. + * Get cluster info. Returns basic information about the cluster. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/index.html | Elasticsearch API documentation} */ export default async function InfoApi (this: That, params?: T.InfoRequest | TB.InfoRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -61,5 +62,8 @@ export default async function InfoApi (this: That, params?: T.InfoRequest | TB.I const method = 'GET' const path = '/' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'info' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/ingest.ts b/src/api/api/ingest.ts index f6c3477..b26c024 100644 --- a/src/api/api/ingest.ts +++ b/src/api/api/ingest.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class Ingest { } /** - * Deletes a pipeline. + * Deletes one or more existing ingest pipeline. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-pipeline-api.html | Elasticsearch API documentation} */ async deletePipeline (this: That, params: T.IngestDeletePipelineRequest | TB.IngestDeletePipelineRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -66,11 +67,17 @@ export default class Ingest { const method = 'DELETE' const path = `/_ingest/pipeline/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ingest.delete_pipeline', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns a pipeline. + * Returns information about one or more ingest pipelines. This API returns a local reference of the pipeline. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/get-pipeline-api.html | Elasticsearch API documentation} */ async getPipeline (this: That, params?: T.IngestGetPipelineRequest | TB.IngestGetPipelineRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -100,11 +107,17 @@ export default class Ingest { method = 'GET' path = '/_ingest/pipeline' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ingest.get_pipeline', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Returns a list of the built-in patterns. + * Extracts structured fields out of a single text field within a document. You choose which field to extract matched fields from, as well as the grok pattern you expect will match. A grok pattern is like a regular expression that supports aliased expressions that can be reused. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/grok-processor.html | Elasticsearch API documentation} */ async processorGrok (this: That, params?: T.IngestProcessorGrokRequest | TB.IngestProcessorGrokRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -127,11 +140,14 @@ export default class Ingest { const method = 'GET' const path = '/_ingest/processor/grok' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ingest.processor_grok' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates or updates a pipeline. + * Creates or updates an ingest pipeline. Changes made using this API take effect immediately. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ingest.html | Elasticsearch API documentation} */ async putPipeline (this: That, params: T.IngestPutPipelineRequest | TB.IngestPutPipelineRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -165,11 +181,17 @@ export default class Ingest { const method = 'PUT' const path = `/_ingest/pipeline/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ingest.put_pipeline', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Allows to simulate a pipeline with example documents. + * Executes an ingest pipeline against a set of provided documents. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/simulate-pipeline-api.html | Elasticsearch API documentation} */ async simulate (this: That, params?: T.IngestSimulateRequest | TB.IngestSimulateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -211,6 +233,12 @@ export default class Ingest { method = body != null ? 'POST' : 'GET' path = '/_ingest/pipeline/_simulate' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ingest.simulate', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/license.ts b/src/api/api/license.ts index 6b99f37..145f870 100644 --- a/src/api/api/license.ts +++ b/src/api/api/license.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class License { } /** - * Retrieves licensing information for the cluster + * Get license information. Returns information about your Elastic license, including its type, its status, when it was issued, and when it expires. For more information about the different types of licenses, refer to [Elastic Stack subscriptions](https://www.elastic.co/subscriptions). 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/get-license.html | Elasticsearch API documentation} */ async get (this: That, params?: T.LicenseGetRequest | TB.LicenseGetRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -67,6 +68,9 @@ export default class License { const method = 'GET' const path = '/_license' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'license.get' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/logstash.ts b/src/api/api/logstash.ts index 9367e30..f92f8c5 100644 --- a/src/api/api/logstash.ts +++ b/src/api/api/logstash.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class Logstash { } /** - * Deletes Logstash Pipelines used by Central Management + * Deletes a pipeline used for Logstash Central Management. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/logstash-api-delete-pipeline.html | Elasticsearch API documentation} */ async deletePipeline (this: That, params: T.LogstashDeletePipelineRequest | TB.LogstashDeletePipelineRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -66,11 +67,17 @@ export default class Logstash { const method = 'DELETE' const path = `/_logstash/pipeline/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'logstash.delete_pipeline', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves Logstash Pipelines used by Central Management + * Retrieves pipelines used for Logstash Central Management. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/logstash-api-get-pipeline.html | Elasticsearch API documentation} */ async getPipeline (this: That, params?: T.LogstashGetPipelineRequest | TB.LogstashGetPipelineRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -100,11 +107,17 @@ export default class Logstash { method = 'GET' path = '/_logstash/pipeline' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'logstash.get_pipeline', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Adds and updates Logstash Pipelines used for Central Management + * Creates or updates a pipeline used for Logstash Central Management. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/logstash-api-put-pipeline.html | Elasticsearch API documentation} */ async putPipeline (this: That, params: T.LogstashPutPipelineRequest | TB.LogstashPutPipelineRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -131,6 +144,12 @@ export default class Logstash { const method = 'PUT' const path = `/_logstash/pipeline/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'logstash.put_pipeline', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/mget.ts b/src/api/api/mget.ts index bf6717a..75ffcae 100644 --- a/src/api/api/mget.ts +++ b/src/api/api/mget.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -80,5 +81,11 @@ export default async function MgetApi (this: That, params?: method = body != null ? 
'POST' : 'GET' path = '/_mget' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'mget', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/ml.ts b/src/api/api/ml.ts index cbbc69f..638fe26 100644 --- a/src/api/api/ml.ts +++ b/src/api/api/ml.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class Ml { } /** - * Closes one or more anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle. + * Close anomaly detection jobs. A job can be opened and closed multiple times throughout its lifecycle. A closed job cannot receive data or perform analysis operations, but you can still explore and navigate results. When you close a job, it runs housekeeping tasks such as pruning the model history, flushing buffers, calculating final results and persisting the model snapshots. Depending upon the size of the job, it could take several minutes to close and the equivalent time to re-open. After it is closed, the job has a minimal overhead on the cluster except for maintaining its meta data. Therefore it is a best practice to close jobs that are no longer required to process data. If you close an anomaly detection job whose datafeed is running, the request first tries to stop the datafeed. This behavior is equivalent to calling stop datafeed API with the same timeout and force parameters as the close job request. When a datafeed that has a specified end date stops, it automatically closes its associated job. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-close-job.html | Elasticsearch API documentation} */ async closeJob (this: That, params: T.MlCloseJobRequest | TB.MlCloseJobRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -78,11 +79,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/anomaly_detectors/${encodeURIComponent(params.job_id.toString())}/_close` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.close_job', + pathParts: { + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Deletes a calendar. + * Removes all scheduled events from a calendar, then deletes it. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-delete-calendar.html | Elasticsearch API documentation} */ async deleteCalendar (this: That, params: T.MlDeleteCalendarRequest | TB.MlDeleteCalendarRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -104,7 +111,13 @@ export default class Ml { const method = 'DELETE' const path = `/_ml/calendars/${encodeURIComponent(params.calendar_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.delete_calendar', + pathParts: { + calendar_id: params.calendar_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -130,7 +143,14 @@ export default class Ml { const method = 'DELETE' const path = `/_ml/calendars/${encodeURIComponent(params.calendar_id.toString())}/events/${encodeURIComponent(params.event_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.delete_calendar_event', + pathParts: { + calendar_id: params.calendar_id, + 
event_id: params.event_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -156,11 +176,18 @@ export default class Ml { const method = 'DELETE' const path = `/_ml/calendars/${encodeURIComponent(params.calendar_id.toString())}/jobs/${encodeURIComponent(params.job_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.delete_calendar_job', + pathParts: { + calendar_id: params.calendar_id, + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Deletes an existing data frame analytics job. + * Deletes a data frame analytics job. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-dfanalytics.html | Elasticsearch API documentation} */ async deleteDataFrameAnalytics (this: That, params: T.MlDeleteDataFrameAnalyticsRequest | TB.MlDeleteDataFrameAnalyticsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -182,7 +209,13 @@ export default class Ml { const method = 'DELETE' const path = `/_ml/data_frame/analytics/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.delete_data_frame_analytics', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -208,11 +241,17 @@ export default class Ml { const method = 'DELETE' const path = `/_ml/datafeeds/${encodeURIComponent(params.datafeed_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.delete_datafeed', + pathParts: { + datafeed_id: params.datafeed_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, 
options) } /** - * Deletes a filter. + * Deletes a filter. If an anomaly detection job references the filter, you cannot delete the filter. You must update or delete the job before you can delete the filter. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-delete-filter.html | Elasticsearch API documentation} */ async deleteFilter (this: That, params: T.MlDeleteFilterRequest | TB.MlDeleteFilterRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -234,11 +273,17 @@ export default class Ml { const method = 'DELETE' const path = `/_ml/filters/${encodeURIComponent(params.filter_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.delete_filter', + pathParts: { + filter_id: params.filter_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Deletes an existing anomaly detection job. + * Delete an anomaly detection job. All job configuration, model state and results are deleted. It is not currently possible to delete multiple jobs using wildcards or a comma separated list. If you delete a job that has a datafeed, the request first tries to delete the datafeed. This behavior is equivalent to calling the delete datafeed API with the same timeout and force parameters as the delete job request. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-delete-job.html | Elasticsearch API documentation} */ async deleteJob (this: That, params: T.MlDeleteJobRequest | TB.MlDeleteJobRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -260,7 +305,13 @@ export default class Ml { const method = 'DELETE' const path = `/_ml/anomaly_detectors/${encodeURIComponent(params.job_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.delete_job', + pathParts: { + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -286,11 +337,17 @@ export default class Ml { const method = 'DELETE' const path = `/_ml/trained_models/${encodeURIComponent(params.model_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.delete_trained_model', + pathParts: { + model_id: params.model_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Deletes a model alias that refers to the trained model + * Deletes a trained model alias. This API deletes an existing model alias that refers to a trained model. If the model alias is missing or refers to a model other than the one identified by the `model_id`, this API returns an error. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-trained-models-aliases.html | Elasticsearch API documentation} */ async deleteTrainedModelAlias (this: That, params: T.MlDeleteTrainedModelAliasRequest | TB.MlDeleteTrainedModelAliasRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -312,11 +369,18 @@ export default class Ml { const method = 'DELETE' const path = `/_ml/trained_models/${encodeURIComponent(params.model_id.toString())}/model_aliases/${encodeURIComponent(params.model_alias.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.delete_trained_model_alias', + pathParts: { + model_alias: params.model_alias, + model_id: params.model_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Estimates the model memory + * Makes an estimation of the memory usage for an anomaly detection job model. It is based on analysis configuration details for the job and cardinality estimates for the fields it references. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-apis.html | Elasticsearch API documentation} */ async estimateModelMemory (this: That, params?: T.MlEstimateModelMemoryRequest | TB.MlEstimateModelMemoryRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -351,11 +415,14 @@ export default class Ml { const method = 'POST' const path = '/_ml/anomaly_detectors/_estimate_model_memory' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.estimate_model_memory' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Evaluates the data frame analytics for an annotated index. + * Evaluates the data frame analytics for an annotated index. 
The API packages together commonly used evaluation metrics for various types of machine learning features. This has been designed for use on indexes created by data frame analytics. Evaluation requires both a ground truth field and an analytics result field to be present. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/evaluate-dfanalytics.html | Elasticsearch API documentation} */ async evaluateDataFrame (this: That, params: T.MlEvaluateDataFrameRequest | TB.MlEvaluateDataFrameRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -389,11 +456,14 @@ export default class Ml { const method = 'POST' const path = '/_ml/data_frame/_evaluate' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.evaluate_data_frame' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Forces any buffered data to be processed by the job. + * Forces any buffered data to be processed by the job. The flush jobs API is only applicable when sending data for analysis using the post data API. Depending on the content of the buffer, then it might additionally calculate new results. Both flush and close operations are similar, however the flush is more efficient if you are expecting to send more data for analysis. When flushing, the job remains open and is available to continue analyzing data. A close operation additionally prunes and persists the model state to disk and the job must be opened again before analyzing further data. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-flush-job.html | Elasticsearch API documentation} */ async flushJob (this: That, params: T.MlFlushJobRequest | TB.MlFlushJobRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -427,7 +497,13 @@ export default class Ml { const method = 'POST' const path = `/_ml/anomaly_detectors/${encodeURIComponent(params.job_id.toString())}/_flush` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.flush_job', + pathParts: { + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -453,7 +529,13 @@ export default class Ml { const method = 'GET' const path = `/_ml/calendars/${encodeURIComponent(params.calendar_id.toString())}/events` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.get_calendar_events', + pathParts: { + calendar_id: params.calendar_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -499,11 +581,17 @@ export default class Ml { method = body != null ? 'POST' : 'GET' path = '/_ml/calendars' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.get_calendars', + pathParts: { + calendar_id: params.calendar_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves configuration information for data frame analytics jobs. + * Retrieves configuration information for data frame analytics jobs. You can get information for multiple data frame analytics jobs in a single API request by using a comma-separated list of data frame analytics jobs or a wildcard expression. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/get-dfanalytics.html | Elasticsearch API documentation} */ async getDataFrameAnalytics (this: That, params?: T.MlGetDataFrameAnalyticsRequest | TB.MlGetDataFrameAnalyticsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -533,7 +621,13 @@ export default class Ml { method = 'GET' path = '/_ml/data_frame/analytics' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.get_data_frame_analytics', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -567,11 +661,17 @@ export default class Ml { method = 'GET' path = '/_ml/data_frame/analytics/_stats' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.get_data_frame_analytics_stats', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves usage information for datafeeds. + * Retrieves usage information for datafeeds. You can get statistics for multiple datafeeds in a single API request by using a comma-separated list of datafeeds or a wildcard expression. You can get statistics for all datafeeds by using `_all`, by specifying `*` as the ``, or by omitting the ``. If the datafeed is stopped, the only information you receive is the `datafeed_id` and the `state`. This API returns a maximum of 10,000 datafeeds. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-get-datafeed-stats.html | Elasticsearch API documentation} */ async getDatafeedStats (this: That, params?: T.MlGetDatafeedStatsRequest | TB.MlGetDatafeedStatsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -601,11 +701,17 @@ export default class Ml { method = 'GET' path = '/_ml/datafeeds/_stats' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.get_datafeed_stats', + pathParts: { + datafeed_id: params.datafeed_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves configuration information for datafeeds. + * Retrieves configuration information for datafeeds. You can get information for multiple datafeeds in a single API request by using a comma-separated list of datafeeds or a wildcard expression. You can get information for all datafeeds by using `_all`, by specifying `*` as the ``, or by omitting the ``. This API returns a maximum of 10,000 datafeeds. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-get-datafeed.html | Elasticsearch API documentation} */ async getDatafeeds (this: That, params?: T.MlGetDatafeedsRequest | TB.MlGetDatafeedsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -635,11 +741,17 @@ export default class Ml { method = 'GET' path = '/_ml/datafeeds' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.get_datafeeds', + pathParts: { + datafeed_id: params.datafeed_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves filters. + * Retrieves filters. You can get a single filter or all filters. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-get-filter.html | Elasticsearch API documentation} */ async getFilters (this: That, params?: T.MlGetFiltersRequest | TB.MlGetFiltersRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -669,7 +781,13 @@ export default class Ml { method = 'GET' path = '/_ml/filters' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.get_filters', + pathParts: { + filter_id: params.filter_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -703,11 +821,17 @@ export default class Ml { method = 'GET' path = '/_ml/anomaly_detectors/_stats' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.get_job_stats', + pathParts: { + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves configuration information for anomaly detection jobs. + * Retrieves configuration information for anomaly detection jobs. You can get information for multiple anomaly detection jobs in a single API request by using a group name, a comma-separated list of jobs, or a wildcard expression. You can get information for all anomaly detection jobs by using `_all`, by specifying `*` as the ``, or by omitting the ``. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-get-job.html | Elasticsearch API documentation} */ async getJobs (this: That, params?: T.MlGetJobsRequest | TB.MlGetJobsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -737,11 +861,17 @@ export default class Ml { method = 'GET' path = '/_ml/anomaly_detectors' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.get_jobs', + pathParts: { + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves overall bucket results that summarize the bucket results of multiple anomaly detection jobs. + * Retrieves overall bucket results that summarize the bucket results of multiple anomaly detection jobs. The `overall_score` is calculated by combining the scores of all the buckets within the overall bucket span. First, the maximum `anomaly_score` per anomaly detection job in the overall bucket is calculated. Then the `top_n` of those scores are averaged to result in the `overall_score`. This means that you can fine-tune the `overall_score` so that it is more or less sensitive to the number of jobs that detect an anomaly at the same time. For example, if you set `top_n` to `1`, the `overall_score` is the maximum bucket score in the overall bucket. Alternatively, if you set `top_n` to the number of jobs, the `overall_score` is high only when all jobs detect anomalies in that overall bucket. If you set the `bucket_span` parameter (to a value greater than its default), the `overall_score` is the maximum `overall_score` of the overall buckets that have a span equal to the jobs' largest bucket span. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-get-overall-buckets.html | Elasticsearch API documentation} */ async getOverallBuckets (this: That, params: T.MlGetOverallBucketsRequest | TB.MlGetOverallBucketsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -775,11 +905,17 @@ export default class Ml { const method = body != null ? 'POST' : 'GET' const path = `/_ml/anomaly_detectors/${encodeURIComponent(params.job_id.toString())}/results/overall_buckets` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.get_overall_buckets', + pathParts: { + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves configuration information for a trained inference model. + * Retrieves configuration information for a trained model. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/get-trained-models.html | Elasticsearch API documentation} */ async getTrainedModels (this: That, params?: T.MlGetTrainedModelsRequest | TB.MlGetTrainedModelsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -809,11 +945,17 @@ export default class Ml { method = 'GET' path = '/_ml/trained_models' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.get_trained_models', + pathParts: { + model_id: params.model_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves usage information for trained inference models. + * Retrieves usage information for trained models. You can get usage information for multiple trained models in a single API request by using a comma-separated list of model IDs or a wildcard expression. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/get-trained-models-stats.html | Elasticsearch API documentation} */ async getTrainedModelsStats (this: That, params?: T.MlGetTrainedModelsStatsRequest | TB.MlGetTrainedModelsStatsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -843,11 +985,17 @@ export default class Ml { method = 'GET' path = '/_ml/trained_models/_stats' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.get_trained_models_stats', + pathParts: { + model_id: params.model_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Evaluate a trained model. + * Evaluates a trained model. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/infer-trained-model.html | Elasticsearch API documentation} */ async inferTrainedModel (this: That, params: T.MlInferTrainedModelRequest | TB.MlInferTrainedModelRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -881,11 +1029,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/trained_models/${encodeURIComponent(params.model_id.toString())}/_infer` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.infer_trained_model', + pathParts: { + model_id: params.model_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Opens one or more anomaly detection jobs. + * Open anomaly detection jobs. An anomaly detection job must be opened in order for it to be ready to receive and analyze data. It can be opened and closed multiple times throughout its lifecycle. When you open a new job, it starts with an empty model. When you open an existing job, the most recent model state is automatically loaded. 
The job is ready to resume its analysis from where it left off, once new data is received. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-open-job.html | Elasticsearch API documentation} */ async openJob (this: That, params: T.MlOpenJobRequest | TB.MlOpenJobRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -919,11 +1073,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/anomaly_detectors/${encodeURIComponent(params.job_id.toString())}/_open` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.open_job', + pathParts: { + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Posts scheduled events in a calendar. + * Adds scheduled events to a calendar. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-post-calendar-event.html | Elasticsearch API documentation} */ async postCalendarEvents (this: That, params: T.MlPostCalendarEventsRequest | TB.MlPostCalendarEventsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -957,11 +1117,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/calendars/${encodeURIComponent(params.calendar_id.toString())}/events` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.post_calendar_events', + pathParts: { + calendar_id: params.calendar_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Previews that will be analyzed given a data frame analytics config. + * Previews the extracted features used by a data frame analytics config. 
* @see {@link http://www.elastic.co/guide/en/elasticsearch/reference/master/preview-dfanalytics.html | Elasticsearch API documentation} */ async previewDataFrameAnalytics (this: That, params?: T.MlPreviewDataFrameAnalyticsRequest | TB.MlPreviewDataFrameAnalyticsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1003,11 +1169,17 @@ export default class Ml { method = body != null ? 'POST' : 'GET' path = '/_ml/data_frame/analytics/_preview' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.preview_data_frame_analytics', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Previews a datafeed. + * Previews a datafeed. This API returns the first "page" of search results from a datafeed. You can preview an existing datafeed or provide configuration details for a datafeed and anomaly detection job in the API. The preview shows the structure of the data that will be passed to the anomaly detection engine. IMPORTANT: When Elasticsearch security features are enabled, the preview uses the credentials of the user that called the API. However, when the datafeed starts it uses the roles of the last user that created or updated the datafeed. To get a preview that accurately reflects the behavior of the datafeed, use the appropriate credentials. You can also use secondary authorization headers to supply the credentials. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-preview-datafeed.html | Elasticsearch API documentation} */ async previewDatafeed (this: That, params?: T.MlPreviewDatafeedRequest | TB.MlPreviewDatafeedRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -1049,11 +1221,17 @@ export default class Ml { method = body != null ? 
'POST' : 'GET' path = '/_ml/datafeeds/_preview' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.preview_datafeed', + pathParts: { + datafeed_id: params.datafeed_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Instantiates a calendar. + * Creates a calendar. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-put-calendar.html | Elasticsearch API documentation} */ async putCalendar (this: That, params: T.MlPutCalendarRequest | TB.MlPutCalendarRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1087,7 +1265,13 @@ export default class Ml { const method = 'PUT' const path = `/_ml/calendars/${encodeURIComponent(params.calendar_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.put_calendar', + pathParts: { + calendar_id: params.calendar_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -1113,11 +1297,18 @@ export default class Ml { const method = 'PUT' const path = `/_ml/calendars/${encodeURIComponent(params.calendar_id.toString())}/jobs/${encodeURIComponent(params.job_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.put_calendar_job', + pathParts: { + calendar_id: params.calendar_id, + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Instantiates a data frame analytics job. + * Instantiates a data frame analytics job. This API creates a data frame analytics job that performs an analysis on the source indices and stores the outcome in a destination index. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/put-dfanalytics.html | Elasticsearch API documentation} */ async putDataFrameAnalytics (this: That, params: T.MlPutDataFrameAnalyticsRequest | TB.MlPutDataFrameAnalyticsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1125,7 +1316,7 @@ export default class Ml { async putDataFrameAnalytics (this: That, params: T.MlPutDataFrameAnalyticsRequest | TB.MlPutDataFrameAnalyticsRequest, options?: TransportRequestOptions): Promise async putDataFrameAnalytics (this: That, params: T.MlPutDataFrameAnalyticsRequest | TB.MlPutDataFrameAnalyticsRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = ['id'] - const acceptedBody: string[] = ['allow_lazy_start', 'analysis', 'analyzed_fields', 'description', 'dest', 'max_num_threads', 'model_memory_limit', 'source'] + const acceptedBody: string[] = ['allow_lazy_start', 'analysis', 'analyzed_fields', 'description', 'dest', 'max_num_threads', 'model_memory_limit', 'source', 'headers', 'version'] const querystring: Record = {} // @ts-expect-error const userBody: any = params?.body @@ -1151,11 +1342,17 @@ export default class Ml { const method = 'PUT' const path = `/_ml/data_frame/analytics/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.put_data_frame_analytics', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Instantiates a datafeed. + * Instantiates a datafeed. Datafeeds retrieve data from Elasticsearch for analysis by an anomaly detection job. You can associate only one datafeed with each anomaly detection job. The datafeed contains a query that runs at a defined interval (`frequency`). If you are concerned about delayed data, you can add a delay (`query_delay') at each interval. 
When Elasticsearch security features are enabled, your datafeed remembers which roles the user who created it had at the time of creation and runs the query using those same roles. If you provide secondary authorization headers, those credentials are used instead. You must use Kibana, this API, or the create anomaly detection jobs API to create a datafeed. Do not add a datafeed directly to the `.ml-config` index. Do not give users `write` privileges on the `.ml-config` index. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-put-datafeed.html | Elasticsearch API documentation} */ async putDatafeed (this: That, params: T.MlPutDatafeedRequest | TB.MlPutDatafeedRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1163,7 +1360,7 @@ export default class Ml { async putDatafeed (this: That, params: T.MlPutDatafeedRequest | TB.MlPutDatafeedRequest, options?: TransportRequestOptions): Promise async putDatafeed (this: That, params: T.MlPutDatafeedRequest | TB.MlPutDatafeedRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = ['datafeed_id'] - const acceptedBody: string[] = ['aggregations', 'chunking_config', 'delayed_data_check_config', 'frequency', 'indices', 'indexes', 'indices_options', 'job_id', 'max_empty_searches', 'query', 'query_delay', 'runtime_mappings', 'script_fields', 'scroll_size'] + const acceptedBody: string[] = ['aggregations', 'chunking_config', 'delayed_data_check_config', 'frequency', 'indices', 'indexes', 'indices_options', 'job_id', 'max_empty_searches', 'query', 'query_delay', 'runtime_mappings', 'script_fields', 'scroll_size', 'headers'] const querystring: Record = {} // @ts-expect-error const userBody: any = params?.body @@ -1189,11 +1386,17 @@ export default class Ml { const method = 'PUT' const path = `/_ml/datafeeds/${encodeURIComponent(params.datafeed_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: 
TransportRequestMetadata = { + name: 'ml.put_datafeed', + pathParts: { + datafeed_id: params.datafeed_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Instantiates a filter. + * Instantiates a filter. A filter contains a list of strings. It can be used by one or more anomaly detection jobs. Specifically, filters are referenced in the `custom_rules` property of detector configuration objects. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-put-filter.html | Elasticsearch API documentation} */ async putFilter (this: That, params: T.MlPutFilterRequest | TB.MlPutFilterRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1227,11 +1430,17 @@ export default class Ml { const method = 'PUT' const path = `/_ml/filters/${encodeURIComponent(params.filter_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.put_filter', + pathParts: { + filter_id: params.filter_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Instantiates an anomaly detection job. + * Create an anomaly detection job. If you include a `datafeed_config`, you must have read index privileges on the source index. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-put-job.html | Elasticsearch API documentation} */ async putJob (this: That, params: T.MlPutJobRequest | TB.MlPutJobRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1265,11 +1474,17 @@ export default class Ml { const method = 'PUT' const path = `/_ml/anomaly_detectors/${encodeURIComponent(params.job_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.put_job', + pathParts: { + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates an inference trained model. + * Enables you to supply a trained model that is not created by data frame analytics. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/put-trained-models.html | Elasticsearch API documentation} */ async putTrainedModel (this: That, params: T.MlPutTrainedModelRequest | TB.MlPutTrainedModelRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1277,7 +1492,7 @@ export default class Ml { async putTrainedModel (this: That, params: T.MlPutTrainedModelRequest | TB.MlPutTrainedModelRequest, options?: TransportRequestOptions): Promise async putTrainedModel (this: That, params: T.MlPutTrainedModelRequest | TB.MlPutTrainedModelRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = ['model_id'] - const acceptedBody: string[] = ['compressed_definition', 'definition', 'description', 'inference_config', 'input', 'metadata', 'model_type', 'model_size_bytes', 'platform_architecture', 'tags'] + const acceptedBody: string[] = ['compressed_definition', 'definition', 'description', 'inference_config', 'input', 'metadata', 'model_type', 'model_size_bytes', 'platform_architecture', 'tags', 'prefix_strings'] const querystring: Record = {} // @ts-expect-error const userBody: any = 
params?.body @@ -1303,11 +1518,17 @@ export default class Ml { const method = 'PUT' const path = `/_ml/trained_models/${encodeURIComponent(params.model_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.put_trained_model', + pathParts: { + model_id: params.model_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates a new model alias (or reassigns an existing one) to refer to the trained model + * Creates or updates a trained model alias. A trained model alias is a logical name used to reference a single trained model. You can use aliases instead of trained model identifiers to make it easier to reference your models. For example, you can use aliases in inference aggregations and processors. An alias must be unique and refer to only a single trained model. However, you can have multiple aliases for each trained model. If you use this API to update an alias such that it references a different trained model ID and the model uses a different type of data frame analytics, an error occurs. For example, this situation occurs if you have a trained model for regression analysis and a trained model for classification analysis; you cannot reassign an alias from one type of trained model to another. If you use this API to update an alias and there are very few input fields in common between the old and new trained models for the model alias, the API returns a warning. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/put-trained-models-aliases.html | Elasticsearch API documentation} */ async putTrainedModelAlias (this: That, params: T.MlPutTrainedModelAliasRequest | TB.MlPutTrainedModelAliasRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1329,11 +1550,18 @@ export default class Ml { const method = 'PUT' const path = `/_ml/trained_models/${encodeURIComponent(params.model_id.toString())}/model_aliases/${encodeURIComponent(params.model_alias.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.put_trained_model_alias', + pathParts: { + model_alias: params.model_alias, + model_id: params.model_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates part of a trained model definition + * Creates part of a trained model definition. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/put-trained-model-definition-part.html | Elasticsearch API documentation} */ async putTrainedModelDefinitionPart (this: That, params: T.MlPutTrainedModelDefinitionPartRequest | TB.MlPutTrainedModelDefinitionPartRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1367,11 +1595,18 @@ export default class Ml { const method = 'PUT' const path = `/_ml/trained_models/${encodeURIComponent(params.model_id.toString())}/definition/${encodeURIComponent(params.part.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.put_trained_model_definition_part', + pathParts: { + model_id: params.model_id, + part: params.part + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates a trained model vocabulary + * Creates a trained model vocabulary. 
This API is supported only for natural language processing (NLP) models. The vocabulary is stored in the index as described in `inference_config.*.vocabulary` of the trained model definition. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/put-trained-model-vocabulary.html | Elasticsearch API documentation} */ async putTrainedModelVocabulary (this: That, params: T.MlPutTrainedModelVocabularyRequest | TB.MlPutTrainedModelVocabularyRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1379,7 +1614,7 @@ export default class Ml { async putTrainedModelVocabulary (this: That, params: T.MlPutTrainedModelVocabularyRequest | TB.MlPutTrainedModelVocabularyRequest, options?: TransportRequestOptions): Promise async putTrainedModelVocabulary (this: That, params: T.MlPutTrainedModelVocabularyRequest | TB.MlPutTrainedModelVocabularyRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = ['model_id'] - const acceptedBody: string[] = ['vocabulary'] + const acceptedBody: string[] = ['vocabulary', 'merges', 'scores'] const querystring: Record = {} // @ts-expect-error const userBody: any = params?.body @@ -1405,11 +1640,17 @@ export default class Ml { const method = 'PUT' const path = `/_ml/trained_models/${encodeURIComponent(params.model_id.toString())}/vocabulary` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.put_trained_model_vocabulary', + pathParts: { + model_id: params.model_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Resets an existing anomaly detection job. + * Resets an anomaly detection job. All model state and results are deleted. The job is ready to start over as if it had just been created. It is not currently possible to reset multiple jobs using wildcards or a comma separated list. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-reset-job.html | Elasticsearch API documentation} */ async resetJob (this: That, params: T.MlResetJobRequest | TB.MlResetJobRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1431,11 +1672,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/anomaly_detectors/${encodeURIComponent(params.job_id.toString())}/_reset` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.reset_job', + pathParts: { + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Starts a data frame analytics job. + * Starts a data frame analytics job. A data frame analytics job can be started and stopped multiple times throughout its lifecycle. If the destination index does not exist, it is created automatically the first time you start the data frame analytics job. The `index.number_of_shards` and `index.number_of_replicas` settings for the destination index are copied from the source index. If there are multiple source indices, the destination index copies the highest setting values. The mappings for the destination index are also copied from the source indices. If there are any mapping conflicts, the job fails to start. If the destination index exists, it is used as is. You can therefore set up the destination index in advance with custom settings and mappings. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/start-dfanalytics.html | Elasticsearch API documentation} */ async startDataFrameAnalytics (this: That, params: T.MlStartDataFrameAnalyticsRequest | TB.MlStartDataFrameAnalyticsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1457,11 +1704,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/data_frame/analytics/${encodeURIComponent(params.id.toString())}/_start` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.start_data_frame_analytics', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Starts one or more datafeeds. + * Starts one or more datafeeds. A datafeed must be started in order to retrieve data from Elasticsearch. A datafeed can be started and stopped multiple times throughout its lifecycle. Before you can start a datafeed, the anomaly detection job must be open. Otherwise, an error occurs. If you restart a stopped datafeed, it continues processing input data from the next millisecond after it was stopped. If new data was indexed for that exact millisecond between stopping and starting, it will be ignored. When Elasticsearch security features are enabled, your datafeed remembers which roles the last user to create or update it had at the time of creation or update and runs the query using those same roles. If you provided secondary authorization headers when you created or updated the datafeed, those credentials are used instead. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-start-datafeed.html | Elasticsearch API documentation} */ async startDatafeed (this: That, params: T.MlStartDatafeedRequest | TB.MlStartDatafeedRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1495,11 +1748,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/datafeeds/${encodeURIComponent(params.datafeed_id.toString())}/_start` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.start_datafeed', + pathParts: { + datafeed_id: params.datafeed_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Start a trained model deployment. + * Starts a trained model deployment, which allocates the model to every machine learning node. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/start-trained-model-deployment.html | Elasticsearch API documentation} */ async startTrainedModelDeployment (this: That, params: T.MlStartTrainedModelDeploymentRequest | TB.MlStartTrainedModelDeploymentRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1521,11 +1780,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/trained_models/${encodeURIComponent(params.model_id.toString())}/deployment/_start` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.start_trained_model_deployment', + pathParts: { + model_id: params.model_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Stops one or more data frame analytics jobs. + * Stops one or more data frame analytics jobs. A data frame analytics job can be started and stopped multiple times throughout its lifecycle. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/stop-dfanalytics.html | Elasticsearch API documentation} */ async stopDataFrameAnalytics (this: That, params: T.MlStopDataFrameAnalyticsRequest | TB.MlStopDataFrameAnalyticsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1547,11 +1812,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/data_frame/analytics/${encodeURIComponent(params.id.toString())}/_stop` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.stop_data_frame_analytics', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Stops one or more datafeeds. + * Stops one or more datafeeds. A datafeed that is stopped ceases to retrieve data from Elasticsearch. A datafeed can be started and stopped multiple times throughout its lifecycle. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-stop-datafeed.html | Elasticsearch API documentation} */ async stopDatafeed (this: That, params: T.MlStopDatafeedRequest | TB.MlStopDatafeedRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1585,11 +1856,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/datafeeds/${encodeURIComponent(params.datafeed_id.toString())}/_stop` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.stop_datafeed', + pathParts: { + datafeed_id: params.datafeed_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Stop a trained model deployment. + * Stops a trained model deployment. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/stop-trained-model-deployment.html | Elasticsearch API documentation} */ async stopTrainedModelDeployment (this: That, params: T.MlStopTrainedModelDeploymentRequest | TB.MlStopTrainedModelDeploymentRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1611,11 +1888,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/trained_models/${encodeURIComponent(params.model_id.toString())}/deployment/_stop` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.stop_trained_model_deployment', + pathParts: { + model_id: params.model_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Updates certain properties of a data frame analytics job. + * Updates an existing data frame analytics job. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/update-dfanalytics.html | Elasticsearch API documentation} */ async updateDataFrameAnalytics (this: That, params: T.MlUpdateDataFrameAnalyticsRequest | TB.MlUpdateDataFrameAnalyticsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1649,11 +1932,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/data_frame/analytics/${encodeURIComponent(params.id.toString())}/_update` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.update_data_frame_analytics', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Updates certain properties of a datafeed. + * Updates the properties of a datafeed. You must stop and start the datafeed for the changes to be applied. 
When Elasticsearch security features are enabled, your datafeed remembers which roles the user who updated it had at the time of the update and runs the query using those same roles. If you provide secondary authorization headers, those credentials are used instead. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-update-datafeed.html | Elasticsearch API documentation} */ async updateDatafeed (this: That, params: T.MlUpdateDatafeedRequest | TB.MlUpdateDatafeedRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1687,11 +1976,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/datafeeds/${encodeURIComponent(params.datafeed_id.toString())}/_update` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.update_datafeed', + pathParts: { + datafeed_id: params.datafeed_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Updates the description of a filter, adds items, or removes items. + * Updates the description of a filter, adds items, or removes items from the list. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/ml-update-filter.html | Elasticsearch API documentation} */ async updateFilter (this: That, params: T.MlUpdateFilterRequest | TB.MlUpdateFilterRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1725,7 +2020,13 @@ export default class Ml { const method = 'POST' const path = `/_ml/filters/${encodeURIComponent(params.filter_id.toString())}/_update` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.update_filter', + pathParts: { + filter_id: params.filter_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -1763,11 +2064,17 @@ export default class Ml { const method = 'POST' const path = `/_ml/anomaly_detectors/${encodeURIComponent(params.job_id.toString())}/_update` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.update_job', + pathParts: { + job_id: params.job_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Updates certain properties of trained model deployment. + * Updates certain properties of a trained model deployment.
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/update-trained-model-deployment.html | Elasticsearch API documentation} */ async updateTrainedModelDeployment (this: That, params: T.MlUpdateTrainedModelDeploymentRequest | TB.MlUpdateTrainedModelDeploymentRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -1801,6 +2108,12 @@ export default class Ml { const method = 'POST' const path = `/_ml/trained_models/${encodeURIComponent(params.model_id.toString())}/deployment/_update` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ml.update_trained_model_deployment', + pathParts: { + model_id: params.model_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/msearch.ts b/src/api/api/msearch.ts index b799c54..5d5fbc9 100644 --- a/src/api/api/msearch.ts +++ b/src/api/api/msearch.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -72,5 +73,11 @@ export default async function MsearchApi> (this: That, params: T.MsearchTemplateRequest | TB.MsearchTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -72,5 +73,11 @@ export default async function MsearchTemplateApi @@ -60,5 +61,11 @@ export default async function OpenPointInTimeApi (this: That, params: T.OpenPoin const method = 'POST' const path = `/${encodeURIComponent(params.index.toString())}/_pit` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'open_point_in_time', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/ping.ts b/src/api/api/ping.ts index b91a22f..9d07552 100644 --- 
a/src/api/api/ping.ts +++ b/src/api/api/ping.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Returns whether the cluster is running. + * Ping the cluster. Returns whether the cluster is running. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/index.html | Elasticsearch API documentation} */ export default async function PingApi (this: That, params?: T.PingRequest | TB.PingRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -61,5 +62,8 @@ export default async function PingApi (this: That, params?: T.PingRequest | TB.P const method = 'HEAD' const path = '/' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'ping' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/put_script.ts b/src/api/api/put_script.ts index 36ea03b..94c3449 100644 --- a/src/api/api/put_script.ts +++ b/src/api/api/put_script.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Creates or updates a script. + * Create or update a script or search template. Creates or updates a stored script or search template. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/modules-scripting.html | Elasticsearch API documentation} */ export default async function PutScriptApi (this: That, params: T.PutScriptRequest | TB.PutScriptRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -79,5 +80,12 @@ export default async function PutScriptApi (this: That, params: T.PutScriptReque method = 'PUT' path = `/_scripts/${encodeURIComponent(params.id.toString())}` } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'put_script', + pathParts: { + id: params.id, + context: params.context + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/query_rules.ts b/src/api/api/query_rules.ts new file mode 100644 index 0000000..1a090d5 --- /dev/null +++ b/src/api/api/query_rules.ts @@ -0,0 +1,295 @@ +/* + * Licensed to Elasticsearch B.V. under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch B.V. licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +/* eslint-disable import/export */ +/* eslint-disable @typescript-eslint/no-misused-new */ +/* eslint-disable @typescript-eslint/no-extraneous-class */ +/* eslint-disable @typescript-eslint/no-unused-vars */ + +// This file was automatically generated by elastic/elastic-client-generator-js +// DO NOT MODIFY IT BY HAND. Instead, modify the source open api file, +// and elastic/elastic-client-generator-js to regenerate this file again. + +import { + Transport, + TransportRequestMetadata, + TransportRequestOptions, + TransportRequestOptionsWithMeta, + TransportRequestOptionsWithOutMeta, + TransportResult +} from '@elastic/transport' +import * as T from '../types' +import * as TB from '../typesWithBodyKey' +interface That { transport: Transport } + +export default class QueryRules { + transport: Transport + constructor (transport: Transport) { + this.transport = transport + } + + /** + * Deletes a query rule within a query ruleset. + * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-query-rule.html | Elasticsearch API documentation} + */ + async deleteRule (this: That, params: T.QueryRulesDeleteRuleRequest | TB.QueryRulesDeleteRuleRequest, options?: TransportRequestOptionsWithOutMeta): Promise + async deleteRule (this: That, params: T.QueryRulesDeleteRuleRequest | TB.QueryRulesDeleteRuleRequest, options?: TransportRequestOptionsWithMeta): Promise> + async deleteRule (this: That, params: T.QueryRulesDeleteRuleRequest | TB.QueryRulesDeleteRuleRequest, options?: TransportRequestOptions): Promise + async deleteRule (this: That, params: T.QueryRulesDeleteRuleRequest | TB.QueryRulesDeleteRuleRequest, options?: TransportRequestOptions): Promise { + const acceptedPath: string[] = ['ruleset_id', 'rule_id'] + const querystring: Record = {} + const body = undefined + + for (const key in params) { + if (acceptedPath.includes(key)) { + continue + } else if (key !== 'body') { + // @ts-expect-error + querystring[key] = params[key] + } + } + + 
const method = 'DELETE' + const path = `/_query_rules/${encodeURIComponent(params.ruleset_id.toString())}/_rule/${encodeURIComponent(params.rule_id.toString())}` + const meta: TransportRequestMetadata = { + name: 'query_rules.delete_rule', + pathParts: { + ruleset_id: params.ruleset_id, + rule_id: params.rule_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) + } + + /** + * Deletes a query ruleset. + * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-query-ruleset.html | Elasticsearch API documentation} + */ + async deleteRuleset (this: That, params: T.QueryRulesDeleteRulesetRequest | TB.QueryRulesDeleteRulesetRequest, options?: TransportRequestOptionsWithOutMeta): Promise + async deleteRuleset (this: That, params: T.QueryRulesDeleteRulesetRequest | TB.QueryRulesDeleteRulesetRequest, options?: TransportRequestOptionsWithMeta): Promise> + async deleteRuleset (this: That, params: T.QueryRulesDeleteRulesetRequest | TB.QueryRulesDeleteRulesetRequest, options?: TransportRequestOptions): Promise + async deleteRuleset (this: That, params: T.QueryRulesDeleteRulesetRequest | TB.QueryRulesDeleteRulesetRequest, options?: TransportRequestOptions): Promise { + const acceptedPath: string[] = ['ruleset_id'] + const querystring: Record = {} + const body = undefined + + for (const key in params) { + if (acceptedPath.includes(key)) { + continue + } else if (key !== 'body') { + // @ts-expect-error + querystring[key] = params[key] + } + } + + const method = 'DELETE' + const path = `/_query_rules/${encodeURIComponent(params.ruleset_id.toString())}` + const meta: TransportRequestMetadata = { + name: 'query_rules.delete_ruleset', + pathParts: { + ruleset_id: params.ruleset_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) + } + + /** + * Returns the details about a query rule within a query ruleset + * @see {@link 
https://www.elastic.co/guide/en/elasticsearch/reference/master/get-query-rule.html | Elasticsearch API documentation} + */ + async getRule (this: That, params: T.QueryRulesGetRuleRequest | TB.QueryRulesGetRuleRequest, options?: TransportRequestOptionsWithOutMeta): Promise + async getRule (this: That, params: T.QueryRulesGetRuleRequest | TB.QueryRulesGetRuleRequest, options?: TransportRequestOptionsWithMeta): Promise> + async getRule (this: That, params: T.QueryRulesGetRuleRequest | TB.QueryRulesGetRuleRequest, options?: TransportRequestOptions): Promise + async getRule (this: That, params: T.QueryRulesGetRuleRequest | TB.QueryRulesGetRuleRequest, options?: TransportRequestOptions): Promise { + const acceptedPath: string[] = ['ruleset_id', 'rule_id'] + const querystring: Record = {} + const body = undefined + + for (const key in params) { + if (acceptedPath.includes(key)) { + continue + } else if (key !== 'body') { + // @ts-expect-error + querystring[key] = params[key] + } + } + + const method = 'GET' + const path = `/_query_rules/${encodeURIComponent(params.ruleset_id.toString())}/_rule/${encodeURIComponent(params.rule_id.toString())}` + const meta: TransportRequestMetadata = { + name: 'query_rules.get_rule', + pathParts: { + ruleset_id: params.ruleset_id, + rule_id: params.rule_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) + } + + /** + * Returns the details about a query ruleset + * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/get-query-ruleset.html | Elasticsearch API documentation} + */ + async getRuleset (this: That, params: T.QueryRulesGetRulesetRequest | TB.QueryRulesGetRulesetRequest, options?: TransportRequestOptionsWithOutMeta): Promise + async getRuleset (this: That, params: T.QueryRulesGetRulesetRequest | TB.QueryRulesGetRulesetRequest, options?: TransportRequestOptionsWithMeta): Promise> + async getRuleset (this: That, params: T.QueryRulesGetRulesetRequest | 
TB.QueryRulesGetRulesetRequest, options?: TransportRequestOptions): Promise + async getRuleset (this: That, params: T.QueryRulesGetRulesetRequest | TB.QueryRulesGetRulesetRequest, options?: TransportRequestOptions): Promise { + const acceptedPath: string[] = ['ruleset_id'] + const querystring: Record = {} + const body = undefined + + for (const key in params) { + if (acceptedPath.includes(key)) { + continue + } else if (key !== 'body') { + // @ts-expect-error + querystring[key] = params[key] + } + } + + const method = 'GET' + const path = `/_query_rules/${encodeURIComponent(params.ruleset_id.toString())}` + const meta: TransportRequestMetadata = { + name: 'query_rules.get_ruleset', + pathParts: { + ruleset_id: params.ruleset_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) + } + + /** + * Returns summarized information about existing query rulesets. + * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/list-query-rulesets.html | Elasticsearch API documentation} + */ + async listRulesets (this: That, params?: T.QueryRulesListRulesetsRequest | TB.QueryRulesListRulesetsRequest, options?: TransportRequestOptionsWithOutMeta): Promise + async listRulesets (this: That, params?: T.QueryRulesListRulesetsRequest | TB.QueryRulesListRulesetsRequest, options?: TransportRequestOptionsWithMeta): Promise> + async listRulesets (this: That, params?: T.QueryRulesListRulesetsRequest | TB.QueryRulesListRulesetsRequest, options?: TransportRequestOptions): Promise + async listRulesets (this: That, params?: T.QueryRulesListRulesetsRequest | TB.QueryRulesListRulesetsRequest, options?: TransportRequestOptions): Promise { + const acceptedPath: string[] = [] + const querystring: Record = {} + const body = undefined + + params = params ?? 
{} + for (const key in params) { + if (acceptedPath.includes(key)) { + continue + } else if (key !== 'body') { + // @ts-expect-error + querystring[key] = params[key] + } + } + + const method = 'GET' + const path = '/_query_rules' + const meta: TransportRequestMetadata = { + name: 'query_rules.list_rulesets' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) + } + + /** + * Creates or updates a query rule within a query ruleset. + * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/put-query-rule.html | Elasticsearch API documentation} + */ + async putRule (this: That, params: T.QueryRulesPutRuleRequest | TB.QueryRulesPutRuleRequest, options?: TransportRequestOptionsWithOutMeta): Promise + async putRule (this: That, params: T.QueryRulesPutRuleRequest | TB.QueryRulesPutRuleRequest, options?: TransportRequestOptionsWithMeta): Promise> + async putRule (this: That, params: T.QueryRulesPutRuleRequest | TB.QueryRulesPutRuleRequest, options?: TransportRequestOptions): Promise + async putRule (this: That, params: T.QueryRulesPutRuleRequest | TB.QueryRulesPutRuleRequest, options?: TransportRequestOptions): Promise { + const acceptedPath: string[] = ['ruleset_id', 'rule_id'] + const acceptedBody: string[] = ['type', 'criteria', 'actions', 'priority'] + const querystring: Record = {} + // @ts-expect-error + const userBody: any = params?.body + let body: Record | string + if (typeof userBody === 'string') { + body = userBody + } else { + body = userBody != null ? { ...userBody } : undefined + } + + for (const key in params) { + if (acceptedBody.includes(key)) { + body = body ?? 
{} + // @ts-expect-error + body[key] = params[key] + } else if (acceptedPath.includes(key)) { + continue + } else if (key !== 'body') { + // @ts-expect-error + querystring[key] = params[key] + } + } + + const method = 'PUT' + const path = `/_query_rules/${encodeURIComponent(params.ruleset_id.toString())}/_rule/${encodeURIComponent(params.rule_id.toString())}` + const meta: TransportRequestMetadata = { + name: 'query_rules.put_rule', + pathParts: { + ruleset_id: params.ruleset_id, + rule_id: params.rule_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) + } + + /** + * Creates or updates a query ruleset. + * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/put-query-ruleset.html | Elasticsearch API documentation} + */ + async putRuleset (this: That, params: T.QueryRulesPutRulesetRequest | TB.QueryRulesPutRulesetRequest, options?: TransportRequestOptionsWithOutMeta): Promise + async putRuleset (this: That, params: T.QueryRulesPutRulesetRequest | TB.QueryRulesPutRulesetRequest, options?: TransportRequestOptionsWithMeta): Promise> + async putRuleset (this: That, params: T.QueryRulesPutRulesetRequest | TB.QueryRulesPutRulesetRequest, options?: TransportRequestOptions): Promise + async putRuleset (this: That, params: T.QueryRulesPutRulesetRequest | TB.QueryRulesPutRulesetRequest, options?: TransportRequestOptions): Promise { + const acceptedPath: string[] = ['ruleset_id'] + const acceptedBody: string[] = ['rules'] + const querystring: Record = {} + // @ts-expect-error + const userBody: any = params?.body + let body: Record | string + if (typeof userBody === 'string') { + body = userBody + } else { + body = userBody != null ? { ...userBody } : undefined + } + + for (const key in params) { + if (acceptedBody.includes(key)) { + body = body ?? 
{} + // @ts-expect-error + body[key] = params[key] + } else if (acceptedPath.includes(key)) { + continue + } else if (key !== 'body') { + // @ts-expect-error + querystring[key] = params[key] + } + } + + const method = 'PUT' + const path = `/_query_rules/${encodeURIComponent(params.ruleset_id.toString())}` + const meta: TransportRequestMetadata = { + name: 'query_rules.put_ruleset', + pathParts: { + ruleset_id: params.ruleset_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) + } +} diff --git a/src/api/api/query_ruleset.ts b/src/api/api/query_ruleset.ts deleted file mode 100644 index 771205b..0000000 --- a/src/api/api/query_ruleset.ts +++ /dev/null @@ -1,162 +0,0 @@ -/* - * Licensed to Elasticsearch B.V. under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch B.V. licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* eslint-disable import/export */ -/* eslint-disable @typescript-eslint/no-misused-new */ -/* eslint-disable @typescript-eslint/no-extraneous-class */ -/* eslint-disable @typescript-eslint/no-unused-vars */ - -// This file was automatically generated by elastic/elastic-client-generator-js -// DO NOT MODIFY IT BY HAND. Instead, modify the source open api file, -// and elastic/elastic-client-generator-js to regenerate this file again. 
- -import { - Transport, - TransportRequestOptions, - TransportRequestOptionsWithMeta, - TransportRequestOptionsWithOutMeta, - TransportResult -} from '@elastic/transport' -import * as T from '../types' -import * as TB from '../typesWithBodyKey' -interface That { transport: Transport } - -export default class QueryRuleset { - transport: Transport - constructor (transport: Transport) { - this.transport = transport - } - - /** - * Deletes a query ruleset. - * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-query-ruleset.html | Elasticsearch API documentation} - */ - async delete (this: That, params: T.QueryRulesetDeleteRequest | TB.QueryRulesetDeleteRequest, options?: TransportRequestOptionsWithOutMeta): Promise - async delete (this: That, params: T.QueryRulesetDeleteRequest | TB.QueryRulesetDeleteRequest, options?: TransportRequestOptionsWithMeta): Promise> - async delete (this: That, params: T.QueryRulesetDeleteRequest | TB.QueryRulesetDeleteRequest, options?: TransportRequestOptions): Promise - async delete (this: That, params: T.QueryRulesetDeleteRequest | TB.QueryRulesetDeleteRequest, options?: TransportRequestOptions): Promise { - const acceptedPath: string[] = ['ruleset_id'] - const querystring: Record = {} - const body = undefined - - for (const key in params) { - if (acceptedPath.includes(key)) { - continue - } else if (key !== 'body') { - // @ts-expect-error - querystring[key] = params[key] - } - } - - const method = 'DELETE' - const path = `/_query_rules/${encodeURIComponent(params.ruleset_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) - } - - /** - * Returns the details about a query ruleset. 
- * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/get-query-ruleset.html | Elasticsearch API documentation} - */ - async get (this: That, params: T.QueryRulesetGetRequest | TB.QueryRulesetGetRequest, options?: TransportRequestOptionsWithOutMeta): Promise - async get (this: That, params: T.QueryRulesetGetRequest | TB.QueryRulesetGetRequest, options?: TransportRequestOptionsWithMeta): Promise> - async get (this: That, params: T.QueryRulesetGetRequest | TB.QueryRulesetGetRequest, options?: TransportRequestOptions): Promise - async get (this: That, params: T.QueryRulesetGetRequest | TB.QueryRulesetGetRequest, options?: TransportRequestOptions): Promise { - const acceptedPath: string[] = ['ruleset_id'] - const querystring: Record = {} - const body = undefined - - for (const key in params) { - if (acceptedPath.includes(key)) { - continue - } else if (key !== 'body') { - // @ts-expect-error - querystring[key] = params[key] - } - } - - const method = 'GET' - const path = `/_query_rules/${encodeURIComponent(params.ruleset_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) - } - - /** - * Lists query rulesets. 
- * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/list-query-rulesets.html | Elasticsearch API documentation} - */ - async list (this: That, params?: T.QueryRulesetListRequest | TB.QueryRulesetListRequest, options?: TransportRequestOptionsWithOutMeta): Promise - async list (this: That, params?: T.QueryRulesetListRequest | TB.QueryRulesetListRequest, options?: TransportRequestOptionsWithMeta): Promise> - async list (this: That, params?: T.QueryRulesetListRequest | TB.QueryRulesetListRequest, options?: TransportRequestOptions): Promise - async list (this: That, params?: T.QueryRulesetListRequest | TB.QueryRulesetListRequest, options?: TransportRequestOptions): Promise { - const acceptedPath: string[] = [] - const querystring: Record = {} - const body = undefined - - params = params ?? {} - for (const key in params) { - if (acceptedPath.includes(key)) { - continue - } else if (key !== 'body') { - // @ts-expect-error - querystring[key] = params[key] - } - } - - const method = 'GET' - const path = '/_query_rules' - return await this.transport.request({ path, method, querystring, body }, options) - } - - /** - * Creates or updates a query ruleset. 
- * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/put-query-ruleset.html | Elasticsearch API documentation} - */ - async put (this: That, params: T.QueryRulesetPutRequest | TB.QueryRulesetPutRequest, options?: TransportRequestOptionsWithOutMeta): Promise - async put (this: That, params: T.QueryRulesetPutRequest | TB.QueryRulesetPutRequest, options?: TransportRequestOptionsWithMeta): Promise> - async put (this: That, params: T.QueryRulesetPutRequest | TB.QueryRulesetPutRequest, options?: TransportRequestOptions): Promise - async put (this: That, params: T.QueryRulesetPutRequest | TB.QueryRulesetPutRequest, options?: TransportRequestOptions): Promise { - const acceptedPath: string[] = ['ruleset_id'] - const acceptedBody: string[] = ['rules'] - const querystring: Record = {} - // @ts-expect-error - const userBody: any = params?.body - let body: Record | string - if (typeof userBody === 'string') { - body = userBody - } else { - body = userBody != null ? { ...userBody } : undefined - } - - for (const key in params) { - if (acceptedBody.includes(key)) { - body = body ?? 
{} - // @ts-expect-error - body[key] = params[key] - } else if (acceptedPath.includes(key)) { - continue - } else if (key !== 'body') { - // @ts-expect-error - querystring[key] = params[key] - } - } - - const method = 'PUT' - const path = `/_query_rules/${encodeURIComponent(params.ruleset_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) - } -} diff --git a/src/api/api/rank_eval.ts b/src/api/api/rank_eval.ts index 5e0e1c2..010a984 100644 --- a/src/api/api/rank_eval.ts +++ b/src/api/api/rank_eval.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Allows to evaluate the quality of ranked search results over a set of typical search queries + * Enables you to evaluate the quality of ranked search results over a set of typical search queries. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/search-rank-eval.html | Elasticsearch API documentation} */ export default async function RankEvalApi (this: That, params: T.RankEvalRequest | TB.RankEvalRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -79,5 +80,11 @@ export default async function RankEvalApi (this: That, params: T.RankEvalRequest method = body != null ? 
'POST' : 'GET' path = '/_rank_eval' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'rank_eval', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/reindex.ts b/src/api/api/reindex.ts index 17790d7..69d23a4 100644 --- a/src/api/api/reindex.ts +++ b/src/api/api/reindex.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Allows to copy documents from one index to another, optionally filtering the source documents by a query, changing the destination index settings, or fetching the documents from a remote cluster. + * Reindex documents. Copies documents from a source to a destination. The source can be any existing index, alias, or data stream. The destination must differ from the source. For example, you cannot reindex a data stream into itself. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-reindex.html | Elasticsearch API documentation} */ export default async function ReindexApi (this: That, params: T.ReindexRequest | TB.ReindexRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -72,5 +73,8 @@ export default async function ReindexApi (this: That, params: T.ReindexRequest | const method = 'POST' const path = '/_reindex' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'reindex' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/render_search_template.ts b/src/api/api/render_search_template.ts index ef14b73..cd31ab4 100644 --- a/src/api/api/render_search_template.ts +++ b/src/api/api/render_search_template.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Allows to use the Mustache language to pre-render a search definition. + * Renders a search template as a search request body. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/render-search-template-api.html | Elasticsearch API documentation} */ export default async function RenderSearchTemplateApi (this: That, params?: T.RenderSearchTemplateRequest | TB.RenderSearchTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -80,5 +81,11 @@ export default async function RenderSearchTemplateApi (this: That, params?: T.Re method = body != null ? 
'POST' : 'GET' path = '/_render/template' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'render_search_template', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/scripts_painless_execute.ts b/src/api/api/scripts_painless_execute.ts index e27a599..a1a9fa0 100644 --- a/src/api/api/scripts_painless_execute.ts +++ b/src/api/api/scripts_painless_execute.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Allows an arbitrary script to be executed and a result to be returned + * Run a script. Runs a script and returns a result. * @see {@link https://www.elastic.co/guide/en/elasticsearch/painless/master/painless-execute-api.html | Elasticsearch API documentation} */ export default async function ScriptsPainlessExecuteApi (this: That, params?: T.ScriptsPainlessExecuteRequest | TB.ScriptsPainlessExecuteRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -73,5 +74,8 @@ export default async function ScriptsPainlessExecuteApi (this const method = body != null ? 
'POST' : 'GET' const path = '/_scripts/painless/_execute' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'scripts_painless_execute' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/scroll.ts b/src/api/api/scroll.ts index 77e2917..13b86d8 100644 --- a/src/api/api/scroll.ts +++ b/src/api/api/scroll.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -72,5 +73,11 @@ export default async function ScrollApi> (this: That, params?: T.SearchRequest | TB.SearchRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -46,7 +47,7 @@ export default async function SearchApi> (this: That, params?: T.SearchRequest | TB.SearchRequest, options?: TransportRequestOptions): Promise> export default async function SearchApi> (this: That, params?: T.SearchRequest | TB.SearchRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = ['index'] - const acceptedBody: string[] = ['aggregations', 'aggs', 'collapse', 'explain', 'ext', 'from', 'highlight', 'track_total_hits', 'indices_boost', 'docvalue_fields', 'min_score', 'post_filter', 'profile', 'query', 'rescore', 'script_fields', 'search_after', 'size', 'slice', 'sort', '_source', 'fields', 'suggest', 'terminate_after', 'timeout', 'track_scores', 'version', 'seq_no_primary_term', 'stored_fields', 'pit', 'runtime_mappings', 'stats'] + const acceptedBody: string[] = ['aggregations', 'aggs', 'collapse', 'explain', 'ext', 'from', 'highlight', 'track_total_hits', 'indices_boost', 'docvalue_fields', 'knn', 'rank', 'min_score', 'post_filter', 'profile', 'query', 'rescore', 'retriever', 'script_fields', 'search_after', 'size', 'slice', 'sort', '_source', 'fields', 'suggest', 'terminate_after', 'timeout', 'track_scores', 'version', 
'seq_no_primary_term', 'stored_fields', 'pit', 'runtime_mappings', 'stats'] const querystring: Record = {} // @ts-expect-error const userBody: any = params?.body @@ -86,5 +87,11 @@ export default async function SearchApi @@ -118,7 +131,13 @@ export default class SearchApplication { const method = 'GET' const path = `/_application/search_application/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'search_application.get', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -152,7 +171,13 @@ export default class SearchApplication { method = 'GET' path = '/_application/analytics' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'search_application.get_behavioral_analytics', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -179,7 +204,10 @@ export default class SearchApplication { const method = 'GET' const path = '/_application/search_application' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'search_application.list' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -210,7 +238,13 @@ export default class SearchApplication { const method = 'PUT' const path = `/_application/search_application/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'search_application.put', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** 
@@ -236,11 +270,17 @@ export default class SearchApplication { const method = 'PUT' const path = `/_application/analytics/${encodeURIComponent(params.name.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'search_application.put_behavioral_analytics', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Perform a search against a search application + * Perform a search against a search application. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/search-application-search.html | Elasticsearch API documentation} */ async search> (this: That, params: T.SearchApplicationSearchRequest | TB.SearchApplicationSearchRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -274,6 +314,12 @@ export default class SearchApplication { const method = body != null ? 'POST' : 'GET' const path = `/_application/search_application/${encodeURIComponent(params.name.toString())}/_search` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'search_application.search', + pathParts: { + name: params.name + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/search_mvt.ts b/src/api/api/search_mvt.ts index 5d3fbdb..6d2f125 100644 --- a/src/api/api/search_mvt.ts +++ b/src/api/api/search_mvt.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Searches a vector tile for geospatial values. Returns results as a binary Mapbox vector tile. + * Search a vector tile. 
Searches a vector tile for geospatial values. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/search-vector-tile-api.html | Elasticsearch API documentation} */ export default async function SearchMvtApi (this: That, params: T.SearchMvtRequest | TB.SearchMvtRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -72,5 +73,15 @@ export default async function SearchMvtApi (this: That, params: T.SearchMvtReque const method = body != null ? 'POST' : 'GET' const path = `/${encodeURIComponent(params.index.toString())}/_mvt/${encodeURIComponent(params.field.toString())}/${encodeURIComponent(params.zoom.toString())}/${encodeURIComponent(params.x.toString())}/${encodeURIComponent(params.y.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'search_mvt', + pathParts: { + index: params.index, + field: params.field, + zoom: params.zoom, + x: params.x, + y: params.y + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/search_template.ts b/src/api/api/search_template.ts index f8a5c35..a158ad5 100644 --- a/src/api/api/search_template.ts +++ b/src/api/api/search_template.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Allows to use the Mustache language to pre-render a search definition. + * Runs a search with a search template. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/search-template.html | Elasticsearch API documentation} */ export default async function SearchTemplateApi (this: That, params?: T.SearchTemplateRequest | TB.SearchTemplateRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -80,5 +81,11 @@ export default async function SearchTemplateApi (this: That method = body != null ? 'POST' : 'GET' path = '/_search/template' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'search_template', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/security.ts b/src/api/api/security.ts index 2b38401..2f03c0e 100644 --- a/src/api/api/security.ts +++ b/src/api/api/security.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class Security { } /** - * Enables authentication as a user and retrieve information about the authenticated user. + * Authenticate a user. Authenticates a user and returns information about the authenticated user. Include the user information in a [basic auth header](https://en.wikipedia.org/wiki/Basic_access_authentication). A successful call returns a JSON structure that shows user information such as their username, the roles that are assigned to the user, any assigned metadata, and information about the realms that authenticated and authorized the user. If the user cannot be authenticated, this API returns a 401 status code. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-authenticate.html | Elasticsearch API documentation} */ async authenticate (this: That, params?: T.SecurityAuthenticateRequest | TB.SecurityAuthenticateRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -67,11 +68,14 @@ export default class Security { const method = 'GET' const path = '/_security/_authenticate' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'security.authenticate' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates an API key for access without requiring basic authentication. + * Create an API key. Creates an API key for access without requiring basic authentication. A successful request returns a JSON structure that contains the API key, its unique id, and its name. If applicable, it also returns expiration information for the API key in milliseconds. NOTE: By default, API keys never expire. You can specify expiration information when you create the API keys. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-create-api-key.html | Elasticsearch API documentation} */ async createApiKey (this: That, params?: T.SecurityCreateApiKeyRequest | TB.SecurityCreateApiKeyRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -79,7 +83,7 @@ export default class Security { async createApiKey (this: That, params?: T.SecurityCreateApiKeyRequest | TB.SecurityCreateApiKeyRequest, options?: TransportRequestOptions): Promise async createApiKey (this: That, params?: T.SecurityCreateApiKeyRequest | TB.SecurityCreateApiKeyRequest, options?: TransportRequestOptions): Promise { const acceptedPath: string[] = [] - const acceptedBody: string[] = ['expiration', 'name', 'role_descriptors'] + const acceptedBody: string[] = ['expiration', 'name', 'role_descriptors', 'metadata'] const querystring: Record = {} // @ts-expect-error const userBody: any = params?.body @@ -106,11 +110,14 @@ export default class Security { const method = 'PUT' const path = '/_security/api_key' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'security.create_api_key' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves information for one or more API keys. + * Get API key information. Retrieves information for one or more API keys. NOTE: If you have only the `manage_own_api_key` privilege, this API returns only the API keys that you own. If you have `read_security`, `manage_api_key` or greater privileges (including `manage_security`), this API returns all API keys regardless of ownership. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-get-api-key.html | Elasticsearch API documentation} */ async getApiKey (this: That, params?: T.SecurityGetApiKeyRequest | TB.SecurityGetApiKeyRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -133,11 +140,14 @@ export default class Security { const method = 'GET' const path = '/_security/api_key' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'security.get_api_key' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Determines whether the specified user has a specified list of privileges. + * Check user privileges. Determines whether the specified user has a specified list of privileges. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-has-privileges.html | Elasticsearch API documentation} */ async hasPrivileges (this: That, params?: T.SecurityHasPrivilegesRequest | TB.SecurityHasPrivilegesRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -179,11 +189,17 @@ export default class Security { method = body != null ? 'POST' : 'GET' path = '/_security/user/_has_privileges' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'security.has_privileges', + pathParts: { + user: params.user + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Invalidates one or more API keys. + * Invalidate API keys. Invalidates one or more API keys. The `manage_api_key` privilege allows deleting any API keys. The `manage_own_api_key` only allows deleting API keys that are owned by the user. In addition, with the `manage_own_api_key` privilege, an invalidation request must be issued in one of the three formats: - Set the parameter `owner=true`. 
- Or, set both `username` and `realm_name` to match the user’s identity. - Or, if the request is issued by an API key, i.e. an API key invalidates itself, specify its ID in the `ids` field. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-invalidate-api-key.html | Elasticsearch API documentation} */ async invalidateApiKey (this: That, params?: T.SecurityInvalidateApiKeyRequest | TB.SecurityInvalidateApiKeyRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -218,11 +234,14 @@ export default class Security { const method = 'DELETE' const path = '/_security/api_key' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'security.invalidate_api_key' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves information for API keys using a subset of query DSL + * Query API keys. Retrieves a paginated list of API keys and their information. You can optionally filter the results with a query. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-query-api-key.html | Elasticsearch API documentation} */ async queryApiKeys (this: That, params?: T.SecurityQueryApiKeysRequest | TB.SecurityQueryApiKeysRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -257,11 +276,14 @@ export default class Security { const method = body != null ? 'POST' : 'GET' const path = '/_security/_query/api_key' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'security.query_api_keys' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Updates attributes of an existing API key. + * Update an API key. Updates attributes of an existing API key. Users can only update API keys that they created or that were granted to them. 
Use this API to update API keys created by the create API Key or grant API Key APIs. If you need to apply the same update to many API keys, you can use bulk update API Keys to reduce overhead. It’s not possible to update expired API keys, or API keys that have been invalidated by invalidate API Key. This API supports updates to an API key’s access scope and metadata. The access scope of an API key is derived from the `role_descriptors` you specify in the request, and a snapshot of the owner user’s permissions at the time of the request. The snapshot of the owner’s permissions is updated automatically on every call. If you don’t specify `role_descriptors` in the request, a call to this API might still change the API key’s access scope. This change can occur if the owner user’s permissions have changed since the API key was created or last modified. To update another user’s API key, use the `run_as` feature to submit a request on behalf of another user. IMPORTANT: It’s not possible to use an API key as the authentication credential for this API. To update an API key, the owner user’s credentials are required. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/security-api-update-api-key.html | Elasticsearch API documentation} */ async updateApiKey (this: That, params: T.SecurityUpdateApiKeyRequest | TB.SecurityUpdateApiKeyRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -295,6 +317,12 @@ export default class Security { const method = 'PUT' const path = `/_security/api_key/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'security.update_api_key', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/sql.ts b/src/api/api/sql.ts index 05e5975..fbd30e8 100644 --- a/src/api/api/sql.ts +++ b/src/api/api/sql.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -78,7 +79,10 @@ export default class Sql { const method = 'POST' const path = '/_sql/close' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'sql.clear_cursor' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -104,7 +108,13 @@ export default class Sql { const method = 'DELETE' const path = `/_sql/async/delete/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'sql.delete_async', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -130,7 +140,13 @@ export default class Sql { const method = 'GET' const path = `/_sql/async/${encodeURIComponent(params.id.toString())}` - return await 
this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'sql.get_async', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -156,7 +172,13 @@ export default class Sql { const method = 'GET' const path = `/_sql/async/status/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'sql.get_async_status', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -195,7 +217,10 @@ export default class Sql { const method = body != null ? 'POST' : 'GET' const path = '/_sql' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'sql.query' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -233,6 +258,9 @@ export default class Sql { const method = body != null ? 
'POST' : 'GET' const path = '/_sql/translate' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'sql.translate' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/synonyms.ts b/src/api/api/synonyms.ts index 80ba965..abbf987 100644 --- a/src/api/api/synonyms.ts +++ b/src/api/api/synonyms.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -66,7 +67,13 @@ export default class Synonyms { const method = 'DELETE' const path = `/_synonyms/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'synonyms.delete_synonym', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -92,7 +99,14 @@ export default class Synonyms { const method = 'DELETE' const path = `/_synonyms/${encodeURIComponent(params.set_id.toString())}/${encodeURIComponent(params.rule_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'synonyms.delete_synonym_rule', + pathParts: { + set_id: params.set_id, + rule_id: params.rule_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -118,7 +132,13 @@ export default class Synonyms { const method = 'GET' const path = `/_synonyms/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'synonyms.get_synonym', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, 
meta }, options) } /** @@ -144,7 +164,14 @@ export default class Synonyms { const method = 'GET' const path = `/_synonyms/${encodeURIComponent(params.set_id.toString())}/${encodeURIComponent(params.rule_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'synonyms.get_synonym_rule', + pathParts: { + set_id: params.set_id, + rule_id: params.rule_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -171,11 +198,14 @@ export default class Synonyms { const method = 'GET' const path = '/_synonyms' - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'synonyms.get_synonyms_sets' + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Creates or updates a synonyms set + * Creates or updates a synonym set. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/put-synonyms-set.html | Elasticsearch API documentation} */ async putSynonym (this: That, params: T.SynonymsPutSynonymRequest | TB.SynonymsPutSynonymRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -209,7 +239,13 @@ export default class Synonyms { const method = 'PUT' const path = `/_synonyms/${encodeURIComponent(params.id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'synonyms.put_synonym', + pathParts: { + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** @@ -247,6 +283,13 @@ export default class Synonyms { const method = 'PUT' const path = `/_synonyms/${encodeURIComponent(params.set_id.toString())}/${encodeURIComponent(params.rule_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, 
options) + const meta: TransportRequestMetadata = { + name: 'synonyms.put_synonym_rule', + pathParts: { + set_id: params.set_id, + rule_id: params.rule_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/tasks.ts b/src/api/api/tasks.ts index f4706ed..d72360e 100644 --- a/src/api/api/tasks.ts +++ b/src/api/api/tasks.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class Tasks { } /** - * Returns information about a task. + * Get task information. Returns information about the tasks currently executing in the cluster. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/tasks.html | Elasticsearch API documentation} */ async get (this: That, params: T.TasksGetRequest | TB.TasksGetRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -66,6 +67,12 @@ export default class Tasks { const method = 'GET' const path = `/_tasks/${encodeURIComponent(params.task_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'tasks.get', + pathParts: { + task_id: params.task_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/terms_enum.ts b/src/api/api/terms_enum.ts index db54a8c..1dd51ec 100644 --- a/src/api/api/terms_enum.ts +++ b/src/api/api/terms_enum.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -72,5 +73,11 @@ export default async function TermsEnumApi (this: That, params: T.TermsEnumReque const method = body != null ? 
'POST' : 'GET' const path = `/${encodeURIComponent(params.index.toString())}/_terms_enum` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'terms_enum', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/termvectors.ts b/src/api/api/termvectors.ts index 6ffee1f..3e8c120 100644 --- a/src/api/api/termvectors.ts +++ b/src/api/api/termvectors.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Returns information and statistics about terms in the fields of a particular document. + * Get term vector information. Returns information and statistics about terms in the fields of a particular document. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-termvectors.html | Elasticsearch API documentation} */ export default async function TermvectorsApi (this: That, params: T.TermvectorsRequest | TB.TermvectorsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -79,5 +80,12 @@ export default async function TermvectorsApi (this: That, p method = body != null ? 
'POST' : 'GET' path = `/${encodeURIComponent(params.index.toString())}/_termvectors` } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'termvectors', + pathParts: { + index: params.index, + id: params.id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/api/transform.ts b/src/api/api/transform.ts index b2c8675..475f5c9 100644 --- a/src/api/api/transform.ts +++ b/src/api/api/transform.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -44,7 +45,7 @@ export default class Transform { } /** - * Deletes an existing transform. + * Delete a transform. Deletes a transform. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-transform.html | Elasticsearch API documentation} */ async deleteTransform (this: That, params: T.TransformDeleteTransformRequest | TB.TransformDeleteTransformRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -66,11 +67,17 @@ export default class Transform { const method = 'DELETE' const path = `/_transform/${encodeURIComponent(params.transform_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'transform.delete_transform', + pathParts: { + transform_id: params.transform_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves configuration information for transforms. + * Get transforms. Retrieves configuration information for transforms. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/get-transform.html | Elasticsearch API documentation} */ async getTransform (this: That, params?: T.TransformGetTransformRequest | TB.TransformGetTransformRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -100,11 +107,17 @@ export default class Transform { method = 'GET' path = '/_transform' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'transform.get_transform', + pathParts: { + transform_id: params.transform_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Retrieves usage information for transforms. + * Get transform stats. Retrieves usage information for transforms. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/get-transform-stats.html | Elasticsearch API documentation} */ async getTransformStats (this: That, params: T.TransformGetTransformStatsRequest | TB.TransformGetTransformStatsRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -126,11 +139,17 @@ export default class Transform { const method = 'GET' const path = `/_transform/${encodeURIComponent(params.transform_id.toString())}/_stats` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'transform.get_transform_stats', + pathParts: { + transform_id: params.transform_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Previews a transform. + * Preview a transform. Generates a preview of the results that you will get when you create a transform with the same configuration. It returns a maximum of 100 results. The calculations are based on all the current data in the source index. It also generates a list of mappings and settings for the destination index. 
These values are determined based on the field types of the source index and the transform aggregations. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/preview-transform.html | Elasticsearch API documentation} */ async previewTransform (this: That, params?: T.TransformPreviewTransformRequest | TB.TransformPreviewTransformRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -172,11 +191,17 @@ export default class Transform { method = body != null ? 'POST' : 'GET' path = '/_transform/_preview' } - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'transform.preview_transform', + pathParts: { + transform_id: params.transform_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Instantiates a transform. + * Create a transform. Creates a transform. A transform copies data from source indices, transforms it, and persists it into an entity-centric destination index. You can also think of the destination index as a two-dimensional tabular data structure (known as a data frame). The ID for each document in the data frame is generated from a hash of the entity, so there is a unique row per entity. You must choose either the latest or pivot method for your transform; you cannot use both in a single transform. If you choose to use the pivot method for your transform, the entities are defined by the set of `group_by` fields in the pivot object. If you choose to use the latest method, the entities are defined by the `unique_key` field values in the latest object. You must have `create_index`, `index`, and `read` privileges on the destination index and `read` and `view_index_metadata` privileges on the source indices. When Elasticsearch security features are enabled, the transform remembers which roles the user that created it had at the time of creation and uses those same roles. 
If those roles do not have the required privileges on the source and destination indices, the transform fails when it attempts unauthorized operations. NOTE: You must use Kibana or this API to create a transform. Do not add a transform directly into any `.transform-internal*` indices using the Elasticsearch index API. If Elasticsearch security features are enabled, do not give users any privileges on `.transform-internal*` indices. If you used transforms prior to 7.5, also do not give users any privileges on `.data-frame-internal*` indices. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/put-transform.html | Elasticsearch API documentation} */ async putTransform (this: That, params: T.TransformPutTransformRequest | TB.TransformPutTransformRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -210,11 +235,17 @@ export default class Transform { const method = 'PUT' const path = `/_transform/${encodeURIComponent(params.transform_id.toString())}` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'transform.put_transform', + pathParts: { + transform_id: params.transform_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Resets an existing transform. + * Reset a transform. Resets a transform. Before you can reset it, you must stop it; alternatively, use the `force` query parameter. If the destination index was created by the transform, it is deleted. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/reset-transform.html | Elasticsearch API documentation} */ async resetTransform (this: That, params: T.TransformResetTransformRequest | TB.TransformResetTransformRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -236,11 +267,17 @@ export default class Transform { const method = 'POST' const path = `/_transform/${encodeURIComponent(params.transform_id.toString())}/_reset` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'transform.reset_transform', + pathParts: { + transform_id: params.transform_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Schedules now a transform. + * Schedule a transform to start now. Instantly runs a transform to process data. If you _schedule_now a transform, it will process the new data instantly, without waiting for the configured frequency interval. After _schedule_now API is called, the transform will be processed again at now + frequency unless _schedule_now API is called again in the meantime. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/schedule-now-transform.html | Elasticsearch API documentation} */ async scheduleNowTransform (this: That, params: T.TransformScheduleNowTransformRequest | TB.TransformScheduleNowTransformRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -262,11 +299,17 @@ export default class Transform { const method = 'POST' const path = `/_transform/${encodeURIComponent(params.transform_id.toString())}/_schedule_now` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'transform.schedule_now_transform', + pathParts: { + transform_id: params.transform_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Starts one or more transforms. + * Start a transform. Starts a transform. When you start a transform, it creates the destination index if it does not already exist. The `number_of_shards` is set to `1` and the `auto_expand_replicas` is set to `0-1`. If it is a pivot transform, it deduces the mapping definitions for the destination index from the source indices and the transform aggregations. If fields in the destination index are derived from scripts (as in the case of `scripted_metric` or `bucket_script` aggregations), the transform uses dynamic mappings unless an index template exists. If it is a latest transform, it does not deduce mapping definitions; it uses dynamic mappings. To use explicit mappings, create the destination index before you start the transform. Alternatively, you can create an index template, though it does not affect the deduced mappings in a pivot transform. When the transform starts, a series of validations occur to ensure its success. If you deferred validation when you created the transform, they occur when you start the transform—with the exception of privilege checks. 
When Elasticsearch security features are enabled, the transform remembers which roles the user that created it had at the time of creation and uses those same roles. If those roles do not have the required privileges on the source and destination indices, the transform fails when it attempts unauthorized operations. * @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/start-transform.html | Elasticsearch API documentation} */ async startTransform (this: That, params: T.TransformStartTransformRequest | TB.TransformStartTransformRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -288,11 +331,17 @@ export default class Transform { const method = 'POST' const path = `/_transform/${encodeURIComponent(params.transform_id.toString())}/_start` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'transform.start_transform', + pathParts: { + transform_id: params.transform_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Stops one or more transforms. + * Stop transforms. Stops one or more transforms. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/stop-transform.html | Elasticsearch API documentation} */ async stopTransform (this: That, params: T.TransformStopTransformRequest | TB.TransformStopTransformRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -314,11 +363,17 @@ export default class Transform { const method = 'POST' const path = `/_transform/${encodeURIComponent(params.transform_id.toString())}/_stop` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'transform.stop_transform', + pathParts: { + transform_id: params.transform_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } /** - * Updates certain properties of a transform. + * Update a transform. Updates certain properties of a transform. All updated properties except `description` do not take effect until after the transform starts the next checkpoint, thus there is data consistency in each checkpoint. To use this API, you must have `read` and `view_index_metadata` privileges for the source indices. You must also have `index` and `read` privileges for the destination index. When Elasticsearch security features are enabled, the transform remembers which roles the user who updated it had at the time of update and runs with those privileges. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/update-transform.html | Elasticsearch API documentation} */ async updateTransform (this: That, params: T.TransformUpdateTransformRequest | TB.TransformUpdateTransformRequest, options?: TransportRequestOptionsWithOutMeta): Promise @@ -352,6 +407,12 @@ export default class Transform { const method = 'POST' const path = `/_transform/${encodeURIComponent(params.transform_id.toString())}/_update` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'transform.update_transform', + pathParts: { + transform_id: params.transform_id + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } } diff --git a/src/api/api/update.ts b/src/api/api/update.ts index d15e007..0dd6f42 100644 --- a/src/api/api/update.ts +++ b/src/api/api/update.ts @@ -28,6 +28,7 @@ import { Transport, + TransportRequestMetadata, TransportRequestOptions, TransportRequestOptionsWithMeta, TransportRequestOptionsWithOutMeta, @@ -38,7 +39,7 @@ import * as TB from '../typesWithBodyKey' interface That { transport: Transport } /** - * Updates a document with a script or partial document. + * Update a document. Updates a document by running a script or passing a partial document. 
* @see {@link https://www.elastic.co/guide/en/elasticsearch/reference/master/docs-update.html | Elasticsearch API documentation} */ export default async function UpdateApi (this: That, params: T.UpdateRequest | TB.UpdateRequest, options?: TransportRequestOptionsWithOutMeta): Promise> @@ -72,5 +73,12 @@ export default async function UpdateApi @@ -72,5 +73,11 @@ export default async function UpdateByQueryApi (this: That, params: T.UpdateByQu const method = 'POST' const path = `/${encodeURIComponent(params.index.toString())}/_update_by_query` - return await this.transport.request({ path, method, querystring, body }, options) + const meta: TransportRequestMetadata = { + name: 'update_by_query', + pathParts: { + index: params.index + } + } + return await this.transport.request({ path, method, querystring, body, meta }, options) } diff --git a/src/api/index.ts b/src/api/index.ts index 0579c25..8f7d03c 100644 --- a/src/api/index.ts +++ b/src/api/index.ts @@ -63,7 +63,7 @@ import mtermvectorsApi from './api/mtermvectors' import openPointInTimeApi from './api/open_point_in_time' import pingApi from './api/ping' import putScriptApi from './api/put_script' -import QueryRulesetApi from './api/query_ruleset' +import QueryRulesApi from './api/query_rules' import rankEvalApi from './api/rank_eval' import reindexApi from './api/reindex' import renderSearchTemplateApi from './api/render_search_template' @@ -122,7 +122,7 @@ export default interface API { openPointInTime: typeof openPointInTimeApi ping: typeof pingApi putScript: typeof putScriptApi - queryRuleset: QueryRulesetApi + queryRules: QueryRulesApi rankEval: typeof rankEvalApi reindex: typeof reindexApi renderSearchTemplate: typeof renderSearchTemplateApi @@ -156,7 +156,7 @@ const kIngest = Symbol('Ingest') const kLicense = Symbol('License') const kLogstash = Symbol('Logstash') const kMl = Symbol('Ml') -const kQueryRuleset = Symbol('QueryRuleset') +const kQueryRules = Symbol('QueryRules') const kSearchApplication = 
Symbol('SearchApplication') const kSecurity = Symbol('Security') const kSql = Symbol('Sql') @@ -178,7 +178,7 @@ export default class API { [kLicense]: symbol | null [kLogstash]: symbol | null [kMl]: symbol | null - [kQueryRuleset]: symbol | null + [kQueryRules]: symbol | null [kSearchApplication]: symbol | null [kSecurity]: symbol | null [kSql]: symbol | null @@ -199,7 +199,7 @@ export default class API { this[kLicense] = null this[kLogstash] = null this[kMl] = null - this[kQueryRuleset] = null + this[kQueryRules] = null this[kSearchApplication] = null this[kSecurity] = null this[kSql] = null @@ -286,8 +286,8 @@ Object.defineProperties(API.prototype, { ml: { get () { return this[kMl] === null ? (this[kMl] = new MlApi(this.transport)) : this[kMl] } }, - queryRuleset: { - get () { return this[kQueryRuleset] === null ? (this[kQueryRuleset] = new QueryRulesetApi(this.transport)) : this[kQueryRuleset] } + queryRules: { + get () { return this[kQueryRules] === null ? (this[kQueryRules] = new QueryRulesApi(this.transport)) : this[kQueryRules] } }, searchApplication: { get () { return this[kSearchApplication] === null ? 
(this[kSearchApplication] = new SearchApplicationApi(this.transport)) : this[kSearchApplication] } diff --git a/src/api/types.ts b/src/api/types.ts index 8182233..bb095d6 100644 --- a/src/api/types.ts +++ b/src/api/types.ts @@ -95,7 +95,7 @@ export interface BulkUpdateAction stored_fields?: Fields docvalue_fields?: (QueryDslFieldAndFormat | Field)[] + knn?: KnnSearch | KnnSearch[] from?: integer highlight?: SearchHighlight indices_boost?: Record[] @@ -835,7 +847,7 @@ export interface MtermvectorsResponse { } export interface MtermvectorsTermVectorsResult { - _id: Id + _id?: Id _index: IndexName _version?: VersionNumber took?: long @@ -937,7 +949,7 @@ export interface RankEvalRankEvalQuery { export interface RankEvalRankEvalRequestItem { id: Id - request?: RankEvalRankEvalQuery + request?: RankEvalRankEvalQuery | QueryDslQueryContainer ratings: RankEvalDocumentRating[] template_id?: Id params?: Record @@ -993,7 +1005,7 @@ export interface ReindexRequest extends RequestBase { conflicts?: Conflicts dest: ReindexDestination max_docs?: long - script?: Script + script?: Script | string size?: long source: ReindexSource } @@ -1090,7 +1102,7 @@ export interface ScriptsPainlessExecutePainlessContextSetup { export interface ScriptsPainlessExecuteRequest extends RequestBase { context?: string context_setup?: ScriptsPainlessExecutePainlessContextSetup - script?: InlineScript | string + script?: Script | string } export interface ScriptsPainlessExecuteResponse { @@ -1147,11 +1159,14 @@ export interface SearchRequest extends RequestBase { track_total_hits?: SearchTrackHits indices_boost?: Record[] docvalue_fields?: (QueryDslFieldAndFormat | Field)[] + knn?: KnnSearch | KnnSearch[] + rank?: RankContainer min_score?: double post_filter?: QueryDslQueryContainer profile?: boolean query?: QueryDslQueryContainer rescore?: SearchRescore | SearchRescore[] + retriever?: RetrieverContainer script_fields?: Record search_after?: SortResults size?: integer @@ -1411,6 +1426,7 @@ export 
interface SearchHit { _node?: string _routing?: string _source?: TDocument + _rank?: integer _seq_no?: SequenceNumber _primary_term?: long _version?: VersionNumber @@ -1734,6 +1750,7 @@ export interface SearchTemplateRequest extends RequestBase { routing?: Routing scroll?: Duration search_type?: SearchType + rest_total_hits_as_int?: boolean typed_keys?: boolean explain?: boolean id?: Id @@ -1813,7 +1830,7 @@ export interface TermvectorsRequest extends RequestBase { export interface TermvectorsResponse { found: boolean - _id: Id + _id?: Id _index: IndexName term_vectors?: Record took: long @@ -1829,7 +1846,7 @@ export interface TermvectorsTerm { } export interface TermvectorsTermVector { - field_statistics: TermvectorsFieldStatistics + field_statistics?: TermvectorsFieldStatistics terms: Record } @@ -1857,7 +1874,7 @@ export interface UpdateRequest detect_noop?: boolean doc?: TPartialDocument doc_as_upsert?: boolean - script?: Script + script?: Script | string scripted_upsert?: boolean _source?: SearchSourceConfig upsert?: TDocument @@ -1901,7 +1918,7 @@ export interface UpdateByQueryRequest extends RequestBase { wait_for_completion?: boolean max_docs?: long query?: QueryDslQueryContainer - script?: Script + script?: Script | string slice?: SlicedScroll conflicts?: Conflicts } @@ -2158,9 +2175,10 @@ export interface GeoDistanceSortKeys { ignore_unmapped?: boolean order?: SortOrder unit?: DistanceUnit + nested?: NestedSortValue } export type GeoDistanceSort = GeoDistanceSortKeys -& { [property: string]: GeoLocation | GeoLocation[] | SortMode | GeoDistanceType | boolean | SortOrder | DistanceUnit } +& { [property: string]: GeoLocation | GeoLocation[] | SortMode | GeoDistanceType | boolean | SortOrder | DistanceUnit | NestedSortValue } export type GeoDistanceType = 'arc' | 'plane' @@ -2266,12 +2284,6 @@ export interface InlineGetKeys { export type InlineGet = InlineGetKeys & { [property: string]: any } -export interface InlineScript extends ScriptBase { - lang?: 
ScriptLanguage - options?: Record - source: string -} - export type Ip = string export interface KnnQuery extends QueryDslQueryBase { @@ -2362,6 +2374,7 @@ export interface NodeAttributes { name: NodeName transport_address: TransportAddress roles?: NodeRoles + external_id?: string } export type NodeId = string @@ -2504,7 +2517,7 @@ export type Routing = string export interface RrfRank { rank_constant?: long - window_size?: long + rank_window_size?: long } export type ScalarValue = long | double | string | boolean | null @@ -2513,14 +2526,16 @@ export interface ScoreSort { order?: SortOrder } -export type Script = InlineScript | string | StoredScriptId - -export interface ScriptBase { +export interface Script { + source?: string + id?: Id params?: Record + lang?: ScriptLanguage + options?: Record } export interface ScriptField { - script: Script + script: Script | string ignore_failure?: boolean } @@ -2528,7 +2543,7 @@ export type ScriptLanguage = 'painless' | 'expression' | 'mustache' | 'java' | s export interface ScriptSort { order?: SortOrder - script: Script + script: Script | string type?: ScriptSortType mode?: SortMode nested?: NestedSortValue @@ -2679,10 +2694,6 @@ export interface StoredScript { source: string } -export interface StoredScriptId extends ScriptBase { - id: Id -} - export type SuggestMode = 'missing' | 'popular' | 'always' export type SuggestionName = string @@ -2836,7 +2847,10 @@ export interface AggregationsAggregationContainer { bucket_script?: AggregationsBucketScriptAggregation bucket_selector?: AggregationsBucketSelectorAggregation bucket_sort?: AggregationsBucketSortAggregation + bucket_count_ks_test?: AggregationsBucketKsAggregation + bucket_correlation?: AggregationsBucketCorrelationAggregation cardinality?: AggregationsCardinalityAggregation + categorize_text?: AggregationsCategorizeTextAggregation children?: AggregationsChildrenAggregation composite?: AggregationsCompositeAggregation cumulative_cardinality?: 
AggregationsCumulativeCardinalityAggregation @@ -2927,7 +2941,7 @@ export interface AggregationsAutoDateHistogramAggregation extends AggregationsBu missing?: DateTime offset?: string params?: Record - script?: Script + script?: Script | string time_zone?: TimeZone } @@ -2997,11 +3011,11 @@ export interface AggregationsBucketPathAggregation { } export interface AggregationsBucketScriptAggregation extends AggregationsPipelineAggregationBase { - script?: Script + script?: Script | string } export interface AggregationsBucketSelectorAggregation extends AggregationsPipelineAggregationBase { - script?: Script + script?: Script | string } export interface AggregationsBucketSortAggregation { @@ -3015,7 +3029,7 @@ export type AggregationsBuckets = Record | T export type AggregationsBucketsPath = string | string[] | Record -export type AggregationsCalendarInterval = 'second' | '1s' | 'minute' | '1m' | 'hour' | '1h' | 'day' | '1d' | 'week' | '1w' | 'month' | '1M' | 'quarter' | '1q' | 'year' | '1Y' +export type AggregationsCalendarInterval = 'second' | '1s' | 'minute' | '1m' | 'hour' | '1h' | 'day' | '1d' | 'week' | '1w' | 'month' | '1M' | 'quarter' | '1q' | 'year' | '1y' export interface AggregationsCardinalityAggregate extends AggregationsAggregateBase { value: long @@ -3074,7 +3088,7 @@ export interface AggregationsCompositeAggregationBase { field?: Field missing_bucket?: boolean missing_order?: AggregationsMissingOrder - script?: Script + script?: Script | string value_type?: AggregationsValueType order?: SortOrder } @@ -3145,7 +3159,7 @@ export interface AggregationsDateHistogramAggregation extends AggregationsBucket offset?: Duration order?: AggregationsAggregateOrder params?: Record - script?: Script + script?: Script | string time_zone?: TimeZone keyed?: boolean } @@ -3186,7 +3200,7 @@ export interface AggregationsDerivativeAggregation extends AggregationsPipelineA export interface AggregationsDiversifiedSamplerAggregation extends AggregationsBucketAggregationBase { 
execution_hint?: AggregationsSamplerAggregationExecutionHint max_docs_per_value?: integer - script?: Script + script?: Script | string shard_size?: integer field?: Field } @@ -3435,7 +3449,7 @@ export interface AggregationsHistogramAggregation extends AggregationsBucketAggr missing?: double offset?: double order?: AggregationsAggregateOrder - script?: Script + script?: Script | string format?: string keyed?: boolean } @@ -3623,7 +3637,7 @@ export interface AggregationsMedianAbsoluteDeviationAggregation extends Aggregat export interface AggregationsMetricAggregationBase { field?: Field missing?: AggregationsMissing - script?: Script + script?: Script | string } export interface AggregationsMinAggregate extends AggregationsSingleMetricAggregateBase { @@ -3777,7 +3791,7 @@ export interface AggregationsRangeAggregation extends AggregationsBucketAggregat field?: Field missing?: integer ranges?: AggregationsAggregationRange[] - script?: Script + script?: Script | string keyed?: boolean format?: string } @@ -3835,7 +3849,7 @@ export interface AggregationsSamplerAggregation extends AggregationsBucketAggreg export type AggregationsSamplerAggregationExecutionHint = 'map' | 'global_ordinals' | 'bytes_hash' export interface AggregationsScriptedHeuristic { - script: Script + script: Script | string } export interface AggregationsScriptedMetricAggregate extends AggregationsAggregateBase { @@ -3843,11 +3857,11 @@ export interface AggregationsScriptedMetricAggregate extends AggregationsAggrega } export interface AggregationsScriptedMetricAggregation extends AggregationsMetricAggregationBase { - combine_script?: Script - init_script?: Script - map_script?: Script + combine_script?: Script | string + init_script?: Script | string + map_script?: Script | string params?: Record - reduce_script?: Script + reduce_script?: Script | string } export interface AggregationsSerialDifferencingAggregation extends AggregationsPipelineAggregationBase { @@ -4060,7 +4074,8 @@ export interface 
AggregationsTermsAggregation extends AggregationsBucketAggregat missing_bucket?: boolean value_type?: string order?: AggregationsAggregateOrder - script?: Script + script?: Script | string + shard_min_doc_count?: long shard_size?: integer show_term_doc_count_error?: boolean size?: integer @@ -4086,7 +4101,7 @@ export interface AggregationsTermsPartition { export interface AggregationsTestPopulation { field: Field - script?: Script + script?: Script | string filter?: QueryDslQueryContainer } @@ -4159,7 +4174,7 @@ export interface AggregationsVariableWidthHistogramAggregation { buckets?: integer shard_size?: integer initial_buffer?: integer - script?: Script + script?: Script | string } export interface AggregationsVariableWidthHistogramBucketKeys extends AggregationsMultiBucketBase { @@ -4183,7 +4198,7 @@ export interface AggregationsWeightedAverageAggregation { export interface AggregationsWeightedAverageValue { field?: Field missing?: double - script?: Script + script?: Script | string } export interface AggregationsWeightedAvgAggregate extends AggregationsSingleMetricAggregateBase { @@ -4231,7 +4246,7 @@ export interface AnalysisCompoundWordTokenFilterBase extends AnalysisTokenFilter export interface AnalysisConditionTokenFilter extends AnalysisTokenFilterBase { type: 'condition' filter: string[] - script: Script + script: Script | string } export interface AnalysisCustomAnalyzer { @@ -4626,7 +4641,7 @@ export interface AnalysisPorterStemTokenFilter extends AnalysisTokenFilterBase { export interface AnalysisPredicateTokenFilter extends AnalysisTokenFilterBase { type: 'predicate_token_filter' - script: Script + script: Script | string } export interface AnalysisRemoveDuplicatesTokenFilter extends AnalysisTokenFilterBase { @@ -4663,7 +4678,7 @@ export type AnalysisSnowballLanguage = 'Armenian' | 'Basque' | 'Catalan' | 'Dani export interface AnalysisSnowballTokenFilter extends AnalysisTokenFilterBase { type: 'snowball' - language: AnalysisSnowballLanguage + 
language?: AnalysisSnowballLanguage } export interface AnalysisStandardAnalyzer { @@ -4910,8 +4925,9 @@ export interface MappingDateRangeProperty extends MappingRangePropertyBase { export interface MappingDenseVectorIndexOptions { type: string - m: integer - ef_construction: integer + m?: integer + ef_construction?: integer + confidence_interval?: float } export interface MappingDenseVectorProperty extends MappingPropertyBase { @@ -4944,7 +4960,7 @@ export interface MappingDynamicProperty extends MappingDocValuesPropertyBase { null_value?: FieldValue boost?: double coerce?: boolean - script?: Script + script?: Script | string on_script_error?: MappingOnScriptError ignore_malformed?: boolean time_series_metric?: MappingTimeSeriesMetricType @@ -5022,7 +5038,7 @@ export interface MappingGeoPointProperty extends MappingDocValuesPropertyBase { null_value?: GeoLocation index?: boolean on_script_error?: MappingOnScriptError - script?: Script + script?: Script | string type: 'geo_point' } @@ -5088,7 +5104,8 @@ export interface MappingIpProperty extends MappingDocValuesPropertyBase { ignore_malformed?: boolean null_value?: string on_script_error?: MappingOnScriptError - script?: Script + script?: Script | string + time_series_dimension?: boolean type: 'ip' } @@ -5107,12 +5124,13 @@ export interface MappingKeywordProperty extends MappingDocValuesPropertyBase { eager_global_ordinals?: boolean index?: boolean index_options?: MappingIndexOptions - script?: Script + script?: Script | string on_script_error?: MappingOnScriptError normalizer?: string norms?: boolean null_value?: string split_queries_on_whitespace?: boolean + time_series_dimension?: boolean type: 'keyword' } @@ -5151,7 +5169,9 @@ export interface MappingNumberPropertyBase extends MappingDocValuesPropertyBase ignore_malformed?: boolean index?: boolean on_script_error?: MappingOnScriptError - script?: Script + script?: Script | string + time_series_metric?: MappingTimeSeriesMetricType + time_series_dimension?: 
boolean } export interface MappingObjectProperty extends MappingCorePropertyBase { @@ -5209,7 +5229,7 @@ export interface MappingRuntimeField { input_field?: Field target_field?: Field target_index?: IndexName - script?: Script + script?: Script | string type: MappingRuntimeFieldType } @@ -5338,6 +5358,7 @@ export interface MappingTypeMapping { runtime?: Record enabled?: boolean subobjects?: boolean + _data_stream_timestamp?: MappingDataStreamTimestamp } export interface MappingUnsignedLongNumberProperty extends MappingNumberPropertyBase { @@ -5351,6 +5372,7 @@ export interface MappingVersionProperty extends MappingDocValuesPropertyBase { export interface MappingWildcardProperty extends MappingDocValuesPropertyBase { type: 'wildcard' + null_value?: string } export interface QueryDslBoolQuery extends QueryDslQueryBase { @@ -5395,28 +5417,22 @@ export interface QueryDslConstantScoreQuery extends QueryDslQueryBase { filter: QueryDslQueryContainer } -export interface QueryDslDateDecayFunctionKeys extends QueryDslDecayFunctionBase { +export interface QueryDslDateDecayFunctionKeys extends QueryDslDecayFunctionBase { } export type QueryDslDateDecayFunction = QueryDslDateDecayFunctionKeys -& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } +& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } export interface QueryDslDateDistanceFeatureQuery extends QueryDslDistanceFeatureQueryBase { } -export interface QueryDslDateRangeQuery extends QueryDslRangeQueryBase { - gt?: DateMath - gte?: DateMath - lt?: DateMath - lte?: DateMath - from?: DateMath | null - to?: DateMath | null +export interface QueryDslDateRangeQuery extends QueryDslRangeQueryBase { format?: DateFormat time_zone?: TimeZone } -export type QueryDslDecayFunction = QueryDslDateDecayFunction | QueryDslNumericDecayFunction | QueryDslGeoDecayFunction +export type QueryDslDecayFunction = QueryDslUntypedDecayFunction | QueryDslDateDecayFunction | QueryDslNumericDecayFunction | 
QueryDslGeoDecayFunction -export interface QueryDslDecayFunctionBase { +export interface QueryDslDecayFunctionBase { multi_value_mode?: QueryDslMultiValueMode } @@ -5432,7 +5448,7 @@ export interface QueryDslDisMaxQuery extends QueryDslQueryBase { tie_breaker?: double } -export type QueryDslDistanceFeatureQuery = QueryDslGeoDistanceFeatureQuery | QueryDslDateDistanceFeatureQuery +export type QueryDslDistanceFeatureQuery = QueryDslUntypedDistanceFeatureQuery | QueryDslGeoDistanceFeatureQuery | QueryDslDateDistanceFeatureQuery export interface QueryDslDistanceFeatureQueryBase extends QueryDslQueryBase { origin: TOrigin @@ -5507,10 +5523,10 @@ export interface QueryDslGeoBoundingBoxQueryKeys extends QueryDslQueryBase { export type QueryDslGeoBoundingBoxQuery = QueryDslGeoBoundingBoxQueryKeys & { [property: string]: GeoBounds | QueryDslGeoExecution | QueryDslGeoValidationMethod | boolean | float | string } -export interface QueryDslGeoDecayFunctionKeys extends QueryDslDecayFunctionBase { +export interface QueryDslGeoDecayFunctionKeys extends QueryDslDecayFunctionBase { } export type QueryDslGeoDecayFunction = QueryDslGeoDecayFunctionKeys -& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } +& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } export interface QueryDslGeoDistanceFeatureQuery extends QueryDslDistanceFeatureQueryBase { } @@ -5603,7 +5619,7 @@ export interface QueryDslIntervalsFilter { not_containing?: QueryDslIntervalsContainer not_overlapping?: QueryDslIntervalsContainer overlapping?: QueryDslIntervalsContainer - script?: Script + script?: Script | string } export interface QueryDslIntervalsFuzzy { @@ -5758,19 +5774,13 @@ export interface QueryDslNestedQuery extends QueryDslQueryBase { score_mode?: QueryDslChildScoreMode } -export interface QueryDslNumberRangeQuery extends QueryDslRangeQueryBase { - gt?: double - gte?: double - lt?: double - lte?: double - from?: double | null - to?: double | null +export 
interface QueryDslNumberRangeQuery extends QueryDslRangeQueryBase { } -export interface QueryDslNumericDecayFunctionKeys extends QueryDslDecayFunctionBase { +export interface QueryDslNumericDecayFunctionKeys extends QueryDslDecayFunctionBase { } export type QueryDslNumericDecayFunction = QueryDslNumericDecayFunctionKeys -& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } +& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } export type QueryDslOperator = 'and' | 'AND' | 'or' | 'OR' @@ -5806,6 +5816,7 @@ export interface QueryDslPinnedQuery extends QueryDslQueryBase { export interface QueryDslPrefixQuery extends QueryDslQueryBase { rewrite?: MultiTermQueryRewrite value: string + case_insensitive?: boolean } export interface QueryDslQueryBase { @@ -5817,11 +5828,12 @@ export interface QueryDslQueryContainer { bool?: QueryDslBoolQuery boosting?: QueryDslBoostingQuery common?: Partial> + combined_fields?: QueryDslCombinedFieldsQuery constant_score?: QueryDslConstantScoreQuery dis_max?: QueryDslDisMaxQuery distance_feature?: QueryDslDistanceFeatureQuery exists?: QueryDslExistsQuery - function_score?: QueryDslFunctionScoreQuery + function_score?: QueryDslFunctionScoreQuery | QueryDslFunctionScoreContainer[] fuzzy?: Partial> geo_bounding_box?: QueryDslGeoBoundingBoxQuery geo_distance?: QueryDslGeoDistanceQuery @@ -5849,9 +5861,10 @@ export interface QueryDslQueryContainer { range?: Partial> rank_feature?: QueryDslRankFeatureQuery regexp?: Partial> - rule_query?: QueryDslRuleQuery + rule?: QueryDslRuleQuery script?: QueryDslScriptQuery script_score?: QueryDslScriptScoreQuery + semantic?: QueryDslSemanticQuery shape?: QueryDslShapeQuery simple_query_string?: QueryDslSimpleQueryStringQuery span_containing?: QueryDslSpanContainingQuery @@ -5863,9 +5876,12 @@ export interface QueryDslQueryContainer { span_or?: QueryDslSpanOrQuery span_term?: Partial> span_within?: QueryDslSpanWithinQuery + sparse_vector?: QueryDslSparseVectorQuery 
term?: Partial> terms?: QueryDslTermsQuery terms_set?: Partial> + text_expansion?: Partial> + weighted_tokens?: Partial> wildcard?: Partial> wrapper?: QueryDslWrapperQuery type?: QueryDslTypeQuery @@ -5904,10 +5920,16 @@ export interface QueryDslRandomScoreFunction { seed?: long | string } -export type QueryDslRangeQuery = QueryDslDateRangeQuery | QueryDslNumberRangeQuery | QueryDslTermsRangeQuery +export type QueryDslRangeQuery = QueryDslUntypedRangeQuery | QueryDslDateRangeQuery | QueryDslNumberRangeQuery | QueryDslTermRangeQuery -export interface QueryDslRangeQueryBase extends QueryDslQueryBase { +export interface QueryDslRangeQueryBase extends QueryDslQueryBase { relation?: QueryDslRangeRelation + gt?: T + gte?: T + lt?: T + lte?: T + from?: T | null + to?: T | null } export type QueryDslRangeRelation = 'within' | 'contains' | 'intersects' @@ -5940,6 +5962,7 @@ export interface QueryDslRankFeatureQuery extends QueryDslQueryBase { } export interface QueryDslRegexpQuery extends QueryDslQueryBase { + case_insensitive?: boolean flags?: string max_determinized_states?: integer rewrite?: MultiTermQueryRewrite @@ -5948,22 +5971,22 @@ export interface QueryDslRegexpQuery extends QueryDslQueryBase { export interface QueryDslRuleQuery extends QueryDslQueryBase { organic: QueryDslQueryContainer - ruleset_id: Id + ruleset_ids: Id[] match_criteria: any } export interface QueryDslScriptQuery extends QueryDslQueryBase { - script: Script + script: Script | string } export interface QueryDslScriptScoreFunction { - script: Script + script: Script | string } export interface QueryDslScriptScoreQuery extends QueryDslQueryBase { min_score?: float query: QueryDslQueryContainer - script: Script + script: Script | string } export interface QueryDslSemanticQuery extends QueryDslQueryBase { @@ -6064,8 +6087,21 @@ export interface QueryDslSpanWithinQuery extends QueryDslQueryBase { little: QueryDslSpanQuery } +export interface QueryDslSparseVectorQuery extends QueryDslQueryBase { + 
field: Field + query_vector?: Record + inference_id?: Id + query?: string + prune?: boolean + pruning_config?: QueryDslTokenPruningConfig +} + export interface QueryDslTermQuery extends QueryDslQueryBase { value: FieldValue + case_insensitive?: boolean +} + +export interface QueryDslTermRangeQuery extends QueryDslRangeQueryBase { } export interface QueryDslTermsLookup { @@ -6082,24 +6118,16 @@ export type QueryDslTermsQuery = QueryDslTermsQueryKeys export type QueryDslTermsQueryField = FieldValue[] | QueryDslTermsLookup -export interface QueryDslTermsRangeQuery extends QueryDslRangeQueryBase { - gt?: string - gte?: string - lt?: string - lte?: string - from?: string | null - to?: string | null -} - export interface QueryDslTermsSetQuery extends QueryDslQueryBase { minimum_should_match_field?: Field - minimum_should_match_script?: Script + minimum_should_match_script?: Script | string terms: string[] } export interface QueryDslTextExpansionQuery extends QueryDslQueryBase { model_id: string model_text: string + pruning_config?: QueryDslTokenPruningConfig } export type QueryDslTextQueryType = 'best_fields' | 'most_fields' | 'cross_fields' | 'phrase' | 'phrase_prefix' | 'bool_prefix' @@ -6114,12 +6142,26 @@ export interface QueryDslTypeQuery extends QueryDslQueryBase { value: string } +export interface QueryDslUntypedDecayFunctionKeys extends QueryDslDecayFunctionBase { +} +export type QueryDslUntypedDecayFunction = QueryDslUntypedDecayFunctionKeys +& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } + +export interface QueryDslUntypedDistanceFeatureQuery extends QueryDslDistanceFeatureQueryBase { +} + +export interface QueryDslUntypedRangeQuery extends QueryDslRangeQueryBase { + format?: DateFormat + time_zone?: TimeZone +} + export interface QueryDslWeightedTokensQuery extends QueryDslQueryBase { tokens: Record pruning_config?: QueryDslTokenPruningConfig } export interface QueryDslWildcardQuery extends QueryDslQueryBase { + case_insensitive?: 
boolean rewrite?: MultiTermQueryRewrite value?: string wildcard?: string @@ -6236,6 +6278,7 @@ export interface AsyncSearchSubmitRequest extends RequestBase { track_total_hits?: SearchTrackHits indices_boost?: Record[] docvalue_fields?: (QueryDslFieldAndFormat | Field)[] + knn?: KnnSearch | KnnSearch[] min_score?: double post_filter?: QueryDslQueryContainer profile?: boolean @@ -6371,28 +6414,39 @@ export interface CatAliasesRequest extends CatCatRequestBase { export type CatAliasesResponse = CatAliasesAliasesRecord[] export interface CatAllocationAllocationRecord { - shards?: string - s?: string - 'disk.indices'?: ByteSize | null - di?: ByteSize | null - diskIndices?: ByteSize | null - 'disk.used'?: ByteSize | null - du?: ByteSize | null - diskUsed?: ByteSize | null - 'disk.avail'?: ByteSize | null - da?: ByteSize | null - diskAvail?: ByteSize | null - 'disk.total'?: ByteSize | null - dt?: ByteSize | null - diskTotal?: ByteSize | null - 'disk.percent'?: Percentage | null - dp?: Percentage | null - diskPercent?: Percentage | null - host?: Host | null - h?: Host | null - ip?: Ip | null - node?: string - n?: string + shards: string + s: string + 'shards.undesired': string | null + 'write_load.forecast': double | null + wlf: double | null + writeLoadForecast: double | null + 'disk.indices.forecast': ByteSize | null + dif: ByteSize | null + diskIndicesForecast: ByteSize | null + 'disk.indices': ByteSize | null + di: ByteSize | null + diskIndices: ByteSize | null + 'disk.used': ByteSize | null + du: ByteSize | null + diskUsed: ByteSize | null + 'disk.avail': ByteSize | null + da: ByteSize | null + diskAvail: ByteSize | null + 'disk.total': ByteSize | null + dt: ByteSize | null + diskTotal: ByteSize | null + 'disk.percent': Percentage | null + dp: Percentage | null + diskPercent: Percentage | null + host: Host | null + h: Host | null + ip: Ip | null + node: string + n: string + 'node.role': string | null + r: string | null + role: string | null + nodeRole: string | null 
} export interface CatAllocationRequest extends CatCatRequestBase { @@ -7184,6 +7238,7 @@ export interface CatMlTrainedModelsTrainedModelsRecord { 'data_frame.analysis'?: string dfa?: string dataFrameAnalyticsAnalysis?: string + type?: string } export interface CatNodeattrsNodeAttributesRecord { @@ -8245,6 +8300,7 @@ export interface CcrGetAutoFollowPatternAutoFollowPatternSummary { remote_cluster: string follow_index_pattern?: IndexPattern leader_index_patterns: IndexPatterns + leader_index_exclusion_patterns: IndexPatterns max_outstanding_read_requests: integer } @@ -8360,6 +8416,7 @@ export interface ClusterComponentTemplateSummary { settings?: Record mappings?: MappingTypeMapping aliases?: Record + lifecycle?: IndicesDataStreamLifecycleWithRollover } export interface ClusterAllocationExplainAllocationDecision { @@ -8464,6 +8521,7 @@ export interface ClusterAllocationExplainResponse { remaining_delay_in_millis?: DurationValue shard: integer unassigned_info?: ClusterAllocationExplainUnassignedInformation + note?: string } export interface ClusterAllocationExplainUnassignedInformation { @@ -8503,6 +8561,7 @@ export type ClusterExistsComponentTemplateResponse = boolean export interface ClusterGetComponentTemplateRequest extends RequestBase { name?: Name flat_settings?: boolean + include_defaults?: boolean local?: boolean master_timeout?: Duration } @@ -8842,6 +8901,7 @@ export interface ClusterStatsClusterNodeCount { data: integer data_cold: integer data_content: integer + data_frozen?: integer data_hot: integer data_warm: integer ingest: integer @@ -8857,6 +8917,7 @@ export interface ClusterStatsClusterNodes { count: ClusterStatsClusterNodeCount discovery_types: Record fs: ClusterStatsClusterFileSystem + indexing_pressure: ClusterStatsIndexingPressure ingest: ClusterStatsClusterIngest jvm: ClusterStatsClusterJvm network_types: ClusterStatsClusterNetworkTypes @@ -8927,6 +8988,7 @@ export interface ClusterStatsFieldTypes { indexed_vector_count?: long 
indexed_vector_dim_max?: long indexed_vector_dim_min?: long + script_count?: integer } export interface ClusterStatsFieldTypesMappings { @@ -8973,6 +9035,7 @@ export interface ClusterStatsNodePackagingType { } export interface ClusterStatsOperatingSystemMemoryInfo { + adjusted_total_in_bytes?: long free_in_bytes: long free_percent: integer total_in_bytes: long @@ -9016,14 +9079,15 @@ export interface ClusterStatsStatsResponseBase extends NodesNodesResponseBase { export interface ConnectorConnector { api_key_id?: string + api_key_secret_id?: string configuration: ConnectorConnectorConfiguration custom_scheduling: ConnectorConnectorCustomScheduling description?: string - error?: string + error?: string | null features?: ConnectorConnectorFeatures filtering: ConnectorFilteringConfig[] id?: Id - index_name?: IndexName + index_name?: IndexName | null is_native: boolean language?: string last_access_control_sync_error?: string @@ -9040,8 +9104,9 @@ export interface ConnectorConnector { name?: string pipeline?: ConnectorIngestPipelineParams scheduling: ConnectorSchedulingConfiguration - service_type: string + service_type?: string status: ConnectorConnectorStatus + sync_cursor?: any sync_now: boolean } @@ -9056,11 +9121,11 @@ export interface ConnectorConnectorConfigProperties { placeholder?: string required: boolean sensitive: boolean - tooltip?: string + tooltip?: string | null type: ConnectorConnectorFieldType ui_restrictions: string[] validations: ConnectorValidation[] - value: ScalarValue + value: any } export type ConnectorConnectorConfiguration = Record @@ -9069,9 +9134,8 @@ export type ConnectorConnectorCustomScheduling = Record + last_access_control_sync_error?: string last_access_control_sync_scheduled_at?: DateTime last_access_control_sync_status?: ConnectorSyncStatus last_deleted_document_count?: long last_incremental_sync_scheduled_at?: DateTime last_indexed_document_count?: long - last_seen?: SpecUtilsWithNullValue - last_sync_error?: SpecUtilsWithNullValue 
+ last_seen?: DateTime + last_sync_error?: string last_sync_scheduled_at?: DateTime last_sync_status?: ConnectorSyncStatus last_synced?: DateTime + sync_cursor?: any } export interface ConnectorLastSyncResponse { @@ -9299,7 +9365,7 @@ export interface ConnectorListResponse { export interface ConnectorPostRequest extends RequestBase { description?: string - index_name: SpecUtilsWithNullValue + index_name?: IndexName is_native?: boolean language?: string name?: string @@ -9307,13 +9373,14 @@ export interface ConnectorPostRequest extends RequestBase { } export interface ConnectorPostResponse { + result: Result id: Id } export interface ConnectorPutRequest extends RequestBase { - connector_id: Id + connector_id?: Id description?: string - index_name: SpecUtilsWithNullValue + index_name?: IndexName is_native?: boolean language?: string name?: string @@ -9322,6 +9389,7 @@ export interface ConnectorPutRequest extends RequestBase { export interface ConnectorPutResponse { result: Result + id: Id } export interface ConnectorSyncJobCancelRequest extends RequestBase { @@ -9349,7 +9417,7 @@ export interface ConnectorSyncJobListRequest extends RequestBase { size?: integer status?: ConnectorSyncStatus connector_id?: Id - job_type?: ConnectorSyncJobType[] + job_type?: ConnectorSyncJobType | ConnectorSyncJobType[] } export interface ConnectorSyncJobListResponse { @@ -9377,8 +9445,8 @@ export interface ConnectorUpdateActiveFilteringResponse { export interface ConnectorUpdateApiKeyIdRequest extends RequestBase { connector_id: Id - api_key_id?: SpecUtilsWithNullValue - api_key_secret_id?: SpecUtilsWithNullValue + api_key_id?: string + api_key_secret_id?: string } export interface ConnectorUpdateApiKeyIdResponse { @@ -9435,7 +9503,7 @@ export interface ConnectorUpdateIndexNameResponse { export interface ConnectorUpdateNameRequest extends RequestBase { connector_id: Id - name: string + name?: string description?: string } @@ -9601,6 +9669,7 @@ export interface EnrichStatsRequest extends 
RequestBase { export interface EnrichStatsResponse { coordinator_stats: EnrichStatsCoordinatorStats[] executing_policies: EnrichStatsExecutingPolicy[] + cache_stats?: EnrichStatsCacheStats[] } export interface EqlEqlHits { @@ -9622,12 +9691,13 @@ export interface EqlHitsEvent { _index: IndexName _id: Id _source: TEvent + missing?: boolean fields?: Record } export interface EqlHitsSequence { events: EqlHitsEvent[] - join_keys: any[] + join_keys?: any[] } export interface EqlDeleteRequest extends RequestBase { @@ -9675,20 +9745,39 @@ export interface EqlSearchRequest extends RequestBase { size?: uint fields?: QueryDslFieldAndFormat | Field | (QueryDslFieldAndFormat | Field)[] result_position?: EqlSearchResultPosition + runtime_mappings?: MappingRuntimeFields } export type EqlSearchResponse = EqlEqlSearchResponseBase export type EqlSearchResultPosition = 'tail' | 'head' +export interface EsqlTableValuesContainer { + integer?: EsqlTableValuesIntegerValue[] + keyword?: EsqlTableValuesKeywordValue[] + long?: EsqlTableValuesLongValue[] + double?: EsqlTableValuesLongDouble[] +} + +export type EsqlTableValuesIntegerValue = integer | integer[] + +export type EsqlTableValuesKeywordValue = string | string[] + +export type EsqlTableValuesLongDouble = double | double[] + +export type EsqlTableValuesLongValue = long | long[] + export interface EsqlQueryRequest extends RequestBase { format?: string delimiter?: string + drop_null_columns?: boolean columnar?: boolean filter?: QueryDslQueryContainer locale?: string - params?: ScalarValue[] + params?: FieldValue[] + profile?: boolean query: string + tables?: Record> } export type EsqlQueryResponse = EsqlColumns @@ -9912,6 +10001,7 @@ export interface IlmForceMergeConfiguration { export interface IlmPhase { actions?: IlmActions min_age?: Duration | long + configurations?: IlmConfigurations } export interface IlmPhases { @@ -10094,6 +10184,7 @@ export interface IndicesAliasDefinition { is_write_index?: boolean routing?: string 
search_routing?: string + is_hidden?: boolean } export interface IndicesCacheQueries { @@ -10109,9 +10200,11 @@ export interface IndicesDataStream { next_generation_managed_by: IndicesManagedBy prefer_ilm: boolean indices: IndicesDataStreamIndex[] + lifecycle?: IndicesDataStreamLifecycleWithRollover name: DataStreamName replicated?: boolean status: HealthStatus + system?: boolean template: Name timestamp_field: IndicesDataStreamTimestampField } @@ -10318,6 +10411,7 @@ export interface IndicesIndexState { settings?: IndicesIndexSettings defaults?: IndicesIndexSettings data_stream?: DataStreamName + lifecycle?: IndicesDataStreamLifecycle } export interface IndicesIndexTemplate { @@ -10340,6 +10434,7 @@ export interface IndicesIndexTemplateSummary { aliases?: Record mappings?: MappingTypeMapping settings?: IndicesIndexSettings + lifecycle?: IndicesDataStreamLifecycleWithRollover } export interface IndicesIndexVersioning { @@ -10401,6 +10496,7 @@ export interface IndicesMappingLimitSettingsNestedObjects { export interface IndicesMappingLimitSettingsTotalFields { limit?: long + ignore_dynamic_beyond_limit?: boolean } export interface IndicesMerge { @@ -10497,8 +10593,8 @@ export interface IndicesSettingsSimilarityLmj { export interface IndicesSettingsSimilarityScripted { type: 'scripted' - script: Script - weight_script?: Script + script: Script | string + weight_script?: Script | string } export interface IndicesSlowlogSettings { @@ -10971,6 +11067,7 @@ export interface IndicesGetRequest extends RequestBase { include_defaults?: boolean local?: boolean master_timeout?: Duration + features?: IndicesGetFeatures } export type IndicesGetResponse = Record @@ -11008,6 +11105,7 @@ export interface IndicesGetDataLifecycleResponse { export interface IndicesGetDataStreamRequest extends RequestBase { name?: DataStreamNames expand_wildcards?: ExpandWildcards + include_defaults?: boolean } export interface IndicesGetDataStreamResponse { @@ -11040,6 +11138,7 @@ export interface 
IndicesGetIndexTemplateRequest extends RequestBase { local?: boolean flat_settings?: boolean master_timeout?: Duration + include_defaults?: boolean } export interface IndicesGetIndexTemplateResponse { @@ -11157,6 +11256,7 @@ export interface IndicesPutIndexTemplateIndexTemplateMapping { aliases?: Record mappings?: MappingTypeMapping settings?: IndicesIndexSettings + lifecycle?: IndicesDataStreamLifecycle } export interface IndicesPutIndexTemplateRequest extends RequestBase { @@ -11561,6 +11661,7 @@ export interface IndicesShrinkResponse { export interface IndicesSimulateIndexTemplateRequest extends RequestBase { name: Name master_timeout?: Duration + include_defaults?: boolean } export interface IndicesSimulateIndexTemplateResponse { @@ -11577,6 +11678,7 @@ export interface IndicesSimulateTemplateRequest extends RequestBase { name?: Name create?: boolean master_timeout?: Duration + include_defaults?: boolean allow_auto_create?: boolean index_patterns?: Indices composed_of?: Name[] @@ -11637,6 +11739,7 @@ export interface IndicesStatsIndexStats { translog?: TranslogStats warmer?: WarmerStats bulk?: BulkStats + shard_stats?: IndicesStatsShardsTotalStats } export interface IndicesStatsIndicesStats { @@ -11644,6 +11747,8 @@ export interface IndicesStatsIndicesStats { shards?: Record total?: IndicesStatsIndexStats uuid?: Uuid + health?: HealthStatus + status?: IndicesStatsIndexMetadataState } export interface IndicesStatsMappingStats { @@ -11756,6 +11861,7 @@ export interface IndicesStatsShardStats { translog?: TranslogStats warmer?: WarmerStats bulk?: BulkStats + shards?: Record shard_stats?: IndicesStatsShardsTotalStats indices?: IndicesStatsIndicesStats } @@ -12163,41 +12269,41 @@ export interface IngestProcessorBase { } export interface IngestProcessorContainer { - attachment?: IngestAttachmentProcessor append?: IngestAppendProcessor - csv?: IngestCsvProcessor + attachment?: IngestAttachmentProcessor + bytes?: IngestBytesProcessor + circle?: IngestCircleProcessor 
convert?: IngestConvertProcessor + csv?: IngestCsvProcessor date?: IngestDateProcessor date_index_name?: IngestDateIndexNameProcessor + dissect?: IngestDissectProcessor dot_expander?: IngestDotExpanderProcessor + drop?: IngestDropProcessor enrich?: IngestEnrichProcessor fail?: IngestFailProcessor foreach?: IngestForeachProcessor - json?: IngestJsonProcessor - user_agent?: IngestUserAgentProcessor - kv?: IngestKeyValueProcessor geoip?: IngestGeoIpProcessor grok?: IngestGrokProcessor gsub?: IngestGsubProcessor + inference?: IngestInferenceProcessor join?: IngestJoinProcessor + json?: IngestJsonProcessor + kv?: IngestKeyValueProcessor lowercase?: IngestLowercaseProcessor + pipeline?: IngestPipelineProcessor remove?: IngestRemoveProcessor rename?: IngestRenameProcessor reroute?: IngestRerouteProcessor script?: IngestScriptProcessor set?: IngestSetProcessor + set_security_user?: IngestSetSecurityUserProcessor sort?: IngestSortProcessor split?: IngestSplitProcessor trim?: IngestTrimProcessor uppercase?: IngestUppercaseProcessor urldecode?: IngestUrlDecodeProcessor - bytes?: IngestBytesProcessor - dissect?: IngestDissectProcessor - set_security_user?: IngestSetSecurityUserProcessor - pipeline?: IngestPipelineProcessor - drop?: IngestDropProcessor - circle?: IngestCircleProcessor - inference?: IngestInferenceProcessor + user_agent?: IngestUserAgentProcessor } export interface IngestRemoveProcessor extends IngestProcessorBase { @@ -13230,6 +13336,14 @@ export type MlInclude = 'definition' | 'feature_importance_baseline' | 'hyperpar export interface MlInferenceConfigCreateContainer { regression?: MlRegressionInferenceOptions classification?: MlClassificationInferenceOptions + text_classification?: MlTextClassificationInferenceOptions + zero_shot_classification?: MlZeroShotClassificationInferenceOptions + fill_mask?: MlFillMaskInferenceOptions + ner?: MlNerInferenceOptions + pass_through?: MlPassThroughInferenceOptions + text_embedding?: MlTextEmbeddingInferenceOptions + 
text_expansion?: MlTextExpansionInferenceOptions + question_answering?: MlQuestionAnsweringInferenceOptions } export interface MlInferenceConfigUpdateContainer { @@ -13373,7 +13487,9 @@ export interface MlJobTimingStats { export type MlMemoryStatus = 'ok' | 'soft_limit' | 'hard_limit' export interface MlModelPlotConfig { + annotations_enabled?: boolean enabled?: boolean + terms?: Field } export interface MlModelSizeStats { @@ -13581,6 +13697,8 @@ export interface MlTimingStats { export interface MlTokenizationConfigContainer { bert?: MlNlpBertTokenizationConfig + mpnet?: MlNlpBertTokenizationConfig + roberta?: MlNlpRobertaTokenizationConfig } export type MlTokenizationTruncate = 'first' | 'second' | 'none' @@ -13627,6 +13745,7 @@ export interface MlTrainedModelAssignmentTaskParameters { model_bytes: integer model_id: Id deployment_id: Id + cache_size: ByteSize number_of_allocations: integer priority: MlTrainingPriority queue_capacity: integer @@ -13746,7 +13865,7 @@ export interface MlTrainedModelPrefixStrings { export interface MlTrainedModelSizeStats { model_size_bytes: ByteSize - required_native_memory_bytes: integer + required_native_memory_bytes: ByteSize } export interface MlTrainedModelStats { @@ -14509,6 +14628,8 @@ export interface MlPutDataFrameAnalyticsRequest extends RequestBase { max_num_threads?: integer model_memory_limit?: string source: MlDataframeAnalyticsSource + headers?: HttpHeaders + version?: VersionString } export interface MlPutDataFrameAnalyticsResponse { @@ -14547,6 +14668,7 @@ export interface MlPutDatafeedRequest extends RequestBase { runtime_mappings?: MappingRuntimeFields script_fields?: Record scroll_size?: integer + headers?: HttpHeaders } export interface MlPutDatafeedResponse { @@ -14664,6 +14786,8 @@ export interface MlPutTrainedModelPreprocessor { export interface MlPutTrainedModelRequest extends RequestBase { model_id: Id + defer_definition_decompression?: boolean + wait_for_completion?: boolean compressed_definition?: string 
definition?: MlPutTrainedModelDefinition description?: string @@ -14674,6 +14798,7 @@ export interface MlPutTrainedModelRequest extends RequestBase { model_size_bytes?: long platform_architecture?: string tags?: string[] + prefix_strings?: MlTrainedModelPrefixStrings } export type MlPutTrainedModelResponse = MlTrainedModelConfig @@ -14735,6 +14860,8 @@ export type MlPutTrainedModelDefinitionPartResponse = AcknowledgedResponseBase export interface MlPutTrainedModelVocabularyRequest extends RequestBase { model_id: Id vocabulary: string[] + merges?: string[] + scores?: double[] } export type MlPutTrainedModelVocabularyResponse = AcknowledgedResponseBase @@ -15688,6 +15815,7 @@ export interface NodesInfoNodeInfoSettingsCluster { routing?: IndicesIndexRouting election: NodesInfoNodeInfoSettingsClusterElection initial_master_nodes?: string[] + deprecation_indexing?: NodesInfoDeprecationIndexing } export interface NodesInfoNodeInfoSettingsClusterElection { @@ -15932,66 +16060,95 @@ export interface NodesUsageResponseBase extends NodesNodesResponseBase { nodes: Record } -export interface QueryRulesetQueryRule { +export interface QueryRulesQueryRule { rule_id: Id - type: QueryRulesetQueryRuleType - criteria: QueryRulesetQueryRuleCriteria[] - actions: QueryRulesetQueryRuleActions + type: QueryRulesQueryRuleType + criteria: QueryRulesQueryRuleCriteria | QueryRulesQueryRuleCriteria[] + actions: QueryRulesQueryRuleActions + priority?: integer } -export interface QueryRulesetQueryRuleActions { +export interface QueryRulesQueryRuleActions { ids?: Id[] docs?: QueryDslPinnedDoc[] } -export interface QueryRulesetQueryRuleCriteria { - type: QueryRulesetQueryRuleCriteriaType - metadata: string +export interface QueryRulesQueryRuleCriteria { + type: QueryRulesQueryRuleCriteriaType + metadata?: string values?: any[] } -export type QueryRulesetQueryRuleCriteriaType = 'global' | 'exact' | 'exact_fuzzy' | 'prefix' | 'suffix' | 'contains' | 'lt' | 'lte' | 'gt' | 'gte' +export type 
QueryRulesQueryRuleCriteriaType = 'global' | 'exact' | 'exact_fuzzy' | 'fuzzy' | 'prefix' | 'suffix' | 'contains' | 'lt' | 'lte' | 'gt' | 'gte' | 'always' + +export type QueryRulesQueryRuleType = 'pinned' + +export interface QueryRulesQueryRuleset { + ruleset_id: Id + rules: QueryRulesQueryRule[] +} + +export interface QueryRulesDeleteRuleRequest extends RequestBase { + ruleset_id: Id + rule_id: Id +} -export type QueryRulesetQueryRuleType = 'pinned' +export type QueryRulesDeleteRuleResponse = AcknowledgedResponseBase -export interface QueryRulesetQueryRuleset { +export interface QueryRulesDeleteRulesetRequest extends RequestBase { ruleset_id: Id - rules: QueryRulesetQueryRule[] } -export interface QueryRulesetDeleteRequest extends RequestBase { +export type QueryRulesDeleteRulesetResponse = AcknowledgedResponseBase + +export interface QueryRulesGetRuleRequest extends RequestBase { ruleset_id: Id + rule_id: Id } -export type QueryRulesetDeleteResponse = AcknowledgedResponseBase +export type QueryRulesGetRuleResponse = QueryRulesQueryRule -export interface QueryRulesetGetRequest extends RequestBase { +export interface QueryRulesGetRulesetRequest extends RequestBase { ruleset_id: Id } -export type QueryRulesetGetResponse = QueryRulesetQueryRuleset +export type QueryRulesGetRulesetResponse = QueryRulesQueryRuleset -export interface QueryRulesetListQueryRulesetListItem { +export interface QueryRulesListRulesetsQueryRulesetListItem { ruleset_id: Id - rules_count: integer + rule_total_count: integer + rule_criteria_types_counts: Record } -export interface QueryRulesetListRequest extends RequestBase { +export interface QueryRulesListRulesetsRequest extends RequestBase { from?: integer size?: integer } -export interface QueryRulesetListResponse { +export interface QueryRulesListRulesetsResponse { count: long - results: QueryRulesetListQueryRulesetListItem[] + results: QueryRulesListRulesetsQueryRulesetListItem[] +} + +export interface QueryRulesPutRuleRequest extends 
RequestBase { + ruleset_id: Id + rule_id: Id + type: QueryRulesQueryRuleType + criteria: QueryRulesQueryRuleCriteria | QueryRulesQueryRuleCriteria[] + actions: QueryRulesQueryRuleActions + priority?: integer +} + +export interface QueryRulesPutRuleResponse { + result: Result } -export interface QueryRulesetPutRequest extends RequestBase { +export interface QueryRulesPutRulesetRequest extends RequestBase { ruleset_id: Id - rules: QueryRulesetQueryRule[] + rules: QueryRulesQueryRule | QueryRulesQueryRule[] } -export interface QueryRulesetPutResponse { +export interface QueryRulesPutRulesetResponse { result: Result } @@ -16199,7 +16356,7 @@ export interface SearchApplicationSearchApplication { } export interface SearchApplicationSearchApplicationTemplate { - script: InlineScript | string + script: Script | string } export interface SearchApplicationDeleteRequest extends RequestBase { @@ -16266,6 +16423,7 @@ export type SearchApplicationPutBehavioralAnalyticsResponse = SearchApplicationP export interface SearchApplicationSearchRequest extends RequestBase { name: Name + typed_keys?: boolean params?: Record } @@ -16347,8 +16505,12 @@ export interface SecurityApiKey { invalidated?: boolean name: Name realm?: string + realm_type?: string username?: Username + profile_uid?: string + metadata?: Metadata role_descriptors?: Record + limited_by?: Record[] _sort?: SortResults } @@ -16362,11 +16524,16 @@ export interface SecurityApplicationPrivileges { resources: string[] } +export interface SecurityBulkError { + count: integer + details: Record +} + export interface SecurityClusterNode { name: Name } -export type SecurityClusterPrivilege = 'all' | 'cancel_task' | 'create_snapshot' | 'cross_cluster_replication' | 'cross_cluster_search' | 'delegate_pki' | 'grant_api_key' | 'manage' | 'manage_api_key' | 'manage_autoscaling' | 'manage_behavioral_analytics' | 'manage_ccr' | 'manage_data_frame_transforms' | 'manage_data_stream_global_retention' | 'manage_enrich' | 'manage_ilm' | 
'manage_index_templates' | 'manage_inference' | 'manage_ingest_pipelines' | 'manage_logstash_pipelines' | 'manage_ml' | 'manage_oidc' | 'manage_own_api_key' | 'manage_pipeline' | 'manage_rollup' | 'manage_saml' | 'manage_search_application' | 'manage_search_query_rules' | 'manage_search_synonyms' | 'manage_security' | 'manage_service_account' | 'manage_slm' | 'manage_token' | 'manage_transform' | 'manage_user_profile' | 'manage_watcher' | 'monitor' | 'monitor_data_frame_transforms' | 'monitor_data_stream_global_retention' | 'monitor_enrich' | 'monitor_inference' | 'monitor_ml' | 'monitor_rollup' | 'monitor_snapshot' | 'monitor_text_structure' | 'monitor_transform' | 'monitor_watcher' | 'none' | 'post_behavioral_analytics_event' | 'read_ccr' | 'read_connector_secrets' | 'read_fleet_secrets' | 'read_ilm' | 'read_pipeline' | 'read_security' | 'read_slm' | 'transport_client' | 'write_connector_secrets' | 'write_fleet_secrets' | string +export type SecurityClusterPrivilege = 'all' | 'cancel_task' | 'create_snapshot' | 'cross_cluster_replication' | 'cross_cluster_search' | 'delegate_pki' | 'grant_api_key' | 'manage' | 'manage_api_key' | 'manage_autoscaling' | 'manage_behavioral_analytics' | 'manage_ccr' | 'manage_data_frame_transforms' | 'manage_data_stream_global_retention' | 'manage_enrich' | 'manage_ilm' | 'manage_index_templates' | 'manage_inference' | 'manage_ingest_pipelines' | 'manage_logstash_pipelines' | 'manage_ml' | 'manage_oidc' | 'manage_own_api_key' | 'manage_pipeline' | 'manage_rollup' | 'manage_saml' | 'manage_search_application' | 'manage_search_query_rules' | 'manage_search_synonyms' | 'manage_security' | 'manage_service_account' | 'manage_slm' | 'manage_token' | 'manage_transform' | 'manage_user_profile' | 'manage_watcher' | 'monitor' | 'monitor_data_frame_transforms' | 'monitor_data_stream_global_retention' | 'monitor_enrich' | 'monitor_inference' | 'monitor_ml' | 'monitor_rollup' | 'monitor_snapshot' | 'monitor_text_structure' | 'monitor_transform' | 
'monitor_watcher' | 'none' | 'post_behavioral_analytics_event' | 'read_ccr' | 'read_fleet_secrets' | 'read_ilm' | 'read_pipeline' | 'read_security' | 'read_slm' | 'transport_client' | 'write_connector_secrets' | 'write_fleet_secrets' | string export interface SecurityCreatedStatus { created: boolean @@ -16410,24 +16577,24 @@ export interface SecurityRealmInfo { } export interface SecurityRoleDescriptor { - cluster?: string[] + cluster?: SecurityClusterPrivilege[] indices?: SecurityIndicesPrivileges[] index?: SecurityIndicesPrivileges[] - global?: SecurityGlobalPrivilege[] | SecurityGlobalPrivilege applications?: SecurityApplicationPrivileges[] metadata?: Metadata run_as?: string[] + description?: string transient_metadata?: Record } export interface SecurityRoleDescriptorRead { - cluster: string[] + cluster: SecurityClusterPrivilege[] indices: SecurityIndicesPrivileges[] index: SecurityIndicesPrivileges[] - global?: SecurityGlobalPrivilege[] | SecurityGlobalPrivilege applications?: SecurityApplicationPrivileges[] metadata?: Metadata run_as?: string[] + description?: string transient_metadata?: Record } @@ -16448,22 +16615,22 @@ export interface SecurityRoleMappingRule { export interface SecurityRoleTemplate { format?: SecurityTemplateFormat - template: Script + template: Script | string } export type SecurityRoleTemplateInlineQuery = string | QueryDslQueryContainer -export interface SecurityRoleTemplateInlineScript extends ScriptBase { - lang?: ScriptLanguage - options?: Record - source: SecurityRoleTemplateInlineQuery -} - export interface SecurityRoleTemplateQuery { - template?: SecurityRoleTemplateScript + template?: SecurityRoleTemplateScript | SecurityRoleTemplateInlineQuery } -export type SecurityRoleTemplateScript = SecurityRoleTemplateInlineScript | SecurityRoleTemplateInlineQuery | StoredScriptId +export interface SecurityRoleTemplateScript { + source?: SecurityRoleTemplateInlineQuery + id?: Id + params?: Record + lang?: ScriptLanguage + options?: Record 
+} export type SecurityTemplateFormat = 'string' | 'json' @@ -16537,10 +16704,35 @@ export interface SecurityAuthenticateResponse { username: Username enabled: boolean authentication_type: string + token?: SecurityAuthenticateToken } export interface SecurityAuthenticateToken { name: Name + type?: string +} + +export interface SecurityBulkDeleteRoleRequest extends RequestBase { + refresh?: Refresh + names: string[] +} + +export interface SecurityBulkDeleteRoleResponse { + deleted?: string[] + not_found?: string[] + errors?: SecurityBulkError +} + +export interface SecurityBulkPutRoleRequest extends RequestBase { + refresh?: Refresh + roles: Record +} + +export interface SecurityBulkPutRoleResponse { + created?: string[] + updated?: string[] + noop?: string[] + errors?: SecurityBulkError } export interface SecurityChangePasswordRequest extends RequestBase { @@ -16611,6 +16803,7 @@ export interface SecurityCreateApiKeyRequest extends RequestBase { expiration?: Duration name?: Name role_descriptors?: Record + metadata?: Metadata } export interface SecurityCreateApiKeyResponse { @@ -16618,6 +16811,7 @@ export interface SecurityCreateApiKeyResponse { expiration?: long id: Id name: Name + encoded: string } export interface SecurityCreateServiceTokenRequest extends RequestBase { @@ -16748,6 +16942,9 @@ export interface SecurityGetApiKeyRequest extends RequestBase { owner?: boolean realm_name?: Name username?: Username + with_limited_by?: boolean + active_only?: boolean + with_profile_uid?: boolean } export interface SecurityGetApiKeyResponse { @@ -16783,6 +16980,7 @@ export interface SecurityGetRoleRole { transient_metadata?: Record applications: SecurityApplicationPrivileges[] role_templates?: SecurityRoleTemplate[] + global?: Record>> } export interface SecurityGetRoleMappingRequest extends RequestBase { @@ -16863,6 +17061,7 @@ export interface SecurityGetTokenUserRealm { export interface SecurityGetUserRequest extends RequestBase { username?: Username | Username[] + 
with_profile_uid?: boolean } export type SecurityGetUserResponse = Record @@ -17028,6 +17227,7 @@ export interface SecurityPutRoleRequest extends RequestBase { indices?: SecurityIndicesPrivileges[] metadata?: Metadata run_as?: string[] + description?: string transient_metadata?: Record } @@ -17106,6 +17306,9 @@ export interface SecurityQueryApiKeysApiKeyQueryContainer { } export interface SecurityQueryApiKeysRequest extends RequestBase { + with_limited_by?: boolean + with_profile_uid?: boolean + typed_keys?: boolean aggregations?: Record /** @alias aggregations */ aggs?: Record @@ -17123,6 +17326,72 @@ export interface SecurityQueryApiKeysResponse { aggregations?: Record } +export interface SecurityQueryRoleQueryRole extends SecurityRoleDescriptor { + _sort?: SortResults + name: string +} + +export interface SecurityQueryRoleRequest extends RequestBase { + query?: SecurityQueryRoleRoleQueryContainer + from?: integer + sort?: Sort + size?: integer + search_after?: SortResults +} + +export interface SecurityQueryRoleResponse { + total: integer + count: integer + roles: SecurityQueryRoleQueryRole[] +} + +export interface SecurityQueryRoleRoleQueryContainer { + bool?: QueryDslBoolQuery + exists?: QueryDslExistsQuery + ids?: QueryDslIdsQuery + match?: Partial> + match_all?: QueryDslMatchAllQuery + prefix?: Partial> + range?: Partial> + simple_query_string?: QueryDslSimpleQueryStringQuery + term?: Partial> + terms?: QueryDslTermsQuery + wildcard?: Partial> +} + +export interface SecurityQueryUserQueryUser extends SecurityUser { + _sort?: SortResults +} + +export interface SecurityQueryUserRequest extends RequestBase { + with_profile_uid?: boolean + query?: SecurityQueryUserUserQueryContainer + from?: integer + sort?: Sort + size?: integer + search_after?: SortResults +} + +export interface SecurityQueryUserResponse { + total: integer + count: integer + users: SecurityQueryUserQueryUser[] +} + +export interface SecurityQueryUserUserQueryContainer { + ids?: 
QueryDslIdsQuery + bool?: QueryDslBoolQuery + exists?: QueryDslExistsQuery + match?: Partial> + match_all?: QueryDslMatchAllQuery + prefix?: Partial> + range?: Partial> + simple_query_string?: QueryDslSimpleQueryStringQuery + term?: Partial> + terms?: QueryDslTermsQuery + wildcard?: Partial> +} + export interface SecuritySamlAuthenticateRequest extends RequestBase { content: string ids: Ids @@ -17561,8 +17830,10 @@ export interface SnapshotSnapshotInfo { failures?: SnapshotSnapshotShardFailure[] include_global_state?: boolean indices?: IndexName[] + index_details?: Record metadata?: Metadata reason?: string + repository?: Name snapshot: Name shards?: ShardStatistics start_time?: DateTime @@ -17660,6 +17931,7 @@ export interface SnapshotCreateRequest extends RequestBase { } export interface SnapshotCreateResponse { + accepted?: boolean snapshot?: SnapshotSnapshotInfo } @@ -17695,12 +17967,23 @@ export interface SnapshotGetRequest extends RequestBase { ignore_unavailable?: boolean master_timeout?: Duration verbose?: boolean + index_details?: boolean + index_names?: boolean include_repository?: boolean + sort?: SnapshotSnapshotSort + size?: integer + order?: SortOrder + after?: string + offset?: integer + from_sort_value?: string + slm_policy_filter?: Name } export interface SnapshotGetResponse { responses?: SnapshotGetSnapshotResponseItem[] snapshots?: SnapshotSnapshotInfo[] + total: integer + remaining: integer } export interface SnapshotGetSnapshotResponseItem { @@ -18159,11 +18442,13 @@ export interface TransformSettings { deduce_mappings?: boolean docs_per_second?: float max_page_search_size?: integer + unattended?: boolean } export interface TransformSource { index: Indices query?: QueryDslQueryContainer + runtime_mappings?: MappingRuntimeFields } export interface TransformSyncContainer { @@ -18268,9 +18553,9 @@ export interface TransformGetTransformStatsTransformIndexerStats { export interface TransformGetTransformStatsTransformProgress { docs_indexed: long 
docs_processed: long - docs_remaining: long - percent_complete: double - total_docs: long + docs_remaining?: long + percent_complete?: double + total_docs?: long } export interface TransformGetTransformStatsTransformStats { @@ -18419,6 +18704,7 @@ export interface WatcherAction { email?: WatcherEmailAction pagerduty?: WatcherPagerDutyAction slack?: WatcherSlackAction + webhook?: WatcherWebhookAction } export type WatcherActionExecutionMode = 'simulate' | 'force_simulate' | 'execute' | 'force_execute' | 'skip' @@ -19115,6 +19401,7 @@ export interface XpackInfoFeatures { vectors?: XpackInfoFeature voting_only: XpackInfoFeature watcher: XpackInfoFeature + archive: XpackInfoFeature } export interface XpackInfoMinimalLicenseInformation { @@ -19202,6 +19489,7 @@ export interface XpackUsageDataTierPhaseStatistics { export interface XpackUsageDataTiers extends XpackUsageBase { data_warm: XpackUsageDataTierPhaseStatistics + data_frozen?: XpackUsageDataTierPhaseStatistics data_cold: XpackUsageDataTierPhaseStatistics data_content: XpackUsageDataTierPhaseStatistics data_hot: XpackUsageDataTierPhaseStatistics @@ -19335,6 +19623,7 @@ export interface XpackUsageMlDataFrameAnalyticsJobsMemory { export interface XpackUsageMlInference { ingest_processors: Record trained_models: XpackUsageMlInferenceTrainedModels + deployments?: XpackUsageMlInferenceDeployments } export interface XpackUsageMlInferenceDeployments { @@ -19366,6 +19655,7 @@ export interface XpackUsageMlInferenceTrainedModels { estimated_heap_memory_usage_bytes?: MlJobStatistics count?: XpackUsageMlInferenceTrainedModelsCount _all: XpackUsageMlCounter + model_size_bytes?: MlJobStatistics } export interface XpackUsageMlInferenceTrainedModelsCount { @@ -19418,6 +19708,7 @@ export interface XpackUsageRequest extends RequestBase { export interface XpackUsageResponse { aggregate_metric: XpackUsageBase analytics: XpackUsageAnalytics + archive: XpackUsageArchive watcher: XpackUsageWatcher ccr: XpackUsageCcr data_frame?: 
XpackUsageBase diff --git a/src/api/typesWithBodyKey.ts b/src/api/typesWithBodyKey.ts index 510d9be..f9c0b80 100644 --- a/src/api/typesWithBodyKey.ts +++ b/src/api/typesWithBodyKey.ts @@ -96,7 +96,7 @@ export interface BulkUpdateAction stored_fields?: Fields docvalue_fields?: (QueryDslFieldAndFormat | Field)[] + knn?: KnnSearch | KnnSearch[] from?: integer highlight?: SearchHighlight indices_boost?: Record[] @@ -868,7 +880,7 @@ export interface MtermvectorsResponse { } export interface MtermvectorsTermVectorsResult { - _id: Id + _id?: Id _index: IndexName _version?: VersionNumber took?: long @@ -973,7 +985,7 @@ export interface RankEvalRankEvalQuery { export interface RankEvalRankEvalRequestItem { id: Id - request?: RankEvalRankEvalQuery + request?: RankEvalRankEvalQuery | QueryDslQueryContainer ratings: RankEvalDocumentRating[] template_id?: Id params?: Record @@ -1034,7 +1046,7 @@ export interface ReindexRequest extends RequestBase { conflicts?: Conflicts dest: ReindexDestination max_docs?: long - script?: Script + script?: Script | string size?: long source: ReindexSource } @@ -1137,7 +1149,7 @@ export interface ScriptsPainlessExecuteRequest extends RequestBase { body?: { context?: string context_setup?: ScriptsPainlessExecutePainlessContextSetup - script?: InlineScript | string + script?: Script | string } } @@ -1201,11 +1213,14 @@ export interface SearchRequest extends RequestBase { track_total_hits?: SearchTrackHits indices_boost?: Record[] docvalue_fields?: (QueryDslFieldAndFormat | Field)[] + knn?: KnnSearch | KnnSearch[] + rank?: RankContainer min_score?: double post_filter?: QueryDslQueryContainer profile?: boolean query?: QueryDslQueryContainer rescore?: SearchRescore | SearchRescore[] + retriever?: RetrieverContainer script_fields?: Record search_after?: SortResults size?: integer @@ -1466,6 +1481,7 @@ export interface SearchHit { _node?: string _routing?: string _source?: TDocument + _rank?: integer _seq_no?: SequenceNumber _primary_term?: long 
_version?: VersionNumber @@ -1792,6 +1808,7 @@ export interface SearchTemplateRequest extends RequestBase { routing?: Routing scroll?: Duration search_type?: SearchType + rest_total_hits_as_int?: boolean typed_keys?: boolean /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */ body?: { @@ -1880,7 +1897,7 @@ export interface TermvectorsRequest extends RequestBase { export interface TermvectorsResponse { found: boolean - _id: Id + _id?: Id _index: IndexName term_vectors?: Record took: long @@ -1896,7 +1913,7 @@ export interface TermvectorsTerm { } export interface TermvectorsTermVector { - field_statistics: TermvectorsFieldStatistics + field_statistics?: TermvectorsFieldStatistics terms: Record } @@ -1926,7 +1943,7 @@ export interface UpdateRequest detect_noop?: boolean doc?: TPartialDocument doc_as_upsert?: boolean - script?: Script + script?: Script | string scripted_upsert?: boolean _source?: SearchSourceConfig upsert?: TDocument @@ -1973,7 +1990,7 @@ export interface UpdateByQueryRequest extends RequestBase { body?: { max_docs?: long query?: QueryDslQueryContainer - script?: Script + script?: Script | string slice?: SlicedScroll conflicts?: Conflicts } @@ -2231,9 +2248,10 @@ export interface GeoDistanceSortKeys { ignore_unmapped?: boolean order?: SortOrder unit?: DistanceUnit + nested?: NestedSortValue } export type GeoDistanceSort = GeoDistanceSortKeys -& { [property: string]: GeoLocation | GeoLocation[] | SortMode | GeoDistanceType | boolean | SortOrder | DistanceUnit } +& { [property: string]: GeoLocation | GeoLocation[] | SortMode | GeoDistanceType | boolean | SortOrder | DistanceUnit | NestedSortValue } export type GeoDistanceType = 'arc' | 'plane' @@ -2339,12 +2357,6 @@ export interface InlineGetKeys { export type InlineGet = InlineGetKeys & { [property: string]: any } -export interface InlineScript extends ScriptBase { - lang?: ScriptLanguage - options?: Record - source: string -} - export type Ip 
= string export interface KnnQuery extends QueryDslQueryBase { @@ -2435,6 +2447,7 @@ export interface NodeAttributes { name: NodeName transport_address: TransportAddress roles?: NodeRoles + external_id?: string } export type NodeId = string @@ -2577,7 +2590,7 @@ export type Routing = string export interface RrfRank { rank_constant?: long - window_size?: long + rank_window_size?: long } export type ScalarValue = long | double | string | boolean | null @@ -2586,14 +2599,16 @@ export interface ScoreSort { order?: SortOrder } -export type Script = InlineScript | string | StoredScriptId - -export interface ScriptBase { +export interface Script { + source?: string + id?: Id params?: Record + lang?: ScriptLanguage + options?: Record } export interface ScriptField { - script: Script + script: Script | string ignore_failure?: boolean } @@ -2601,7 +2616,7 @@ export type ScriptLanguage = 'painless' | 'expression' | 'mustache' | 'java' | s export interface ScriptSort { order?: SortOrder - script: Script + script: Script | string type?: ScriptSortType mode?: SortMode nested?: NestedSortValue @@ -2752,10 +2767,6 @@ export interface StoredScript { source: string } -export interface StoredScriptId extends ScriptBase { - id: Id -} - export type SuggestMode = 'missing' | 'popular' | 'always' export type SuggestionName = string @@ -2909,7 +2920,10 @@ export interface AggregationsAggregationContainer { bucket_script?: AggregationsBucketScriptAggregation bucket_selector?: AggregationsBucketSelectorAggregation bucket_sort?: AggregationsBucketSortAggregation + bucket_count_ks_test?: AggregationsBucketKsAggregation + bucket_correlation?: AggregationsBucketCorrelationAggregation cardinality?: AggregationsCardinalityAggregation + categorize_text?: AggregationsCategorizeTextAggregation children?: AggregationsChildrenAggregation composite?: AggregationsCompositeAggregation cumulative_cardinality?: AggregationsCumulativeCardinalityAggregation @@ -3000,7 +3014,7 @@ export interface 
AggregationsAutoDateHistogramAggregation extends AggregationsBu missing?: DateTime offset?: string params?: Record - script?: Script + script?: Script | string time_zone?: TimeZone } @@ -3070,11 +3084,11 @@ export interface AggregationsBucketPathAggregation { } export interface AggregationsBucketScriptAggregation extends AggregationsPipelineAggregationBase { - script?: Script + script?: Script | string } export interface AggregationsBucketSelectorAggregation extends AggregationsPipelineAggregationBase { - script?: Script + script?: Script | string } export interface AggregationsBucketSortAggregation { @@ -3088,7 +3102,7 @@ export type AggregationsBuckets = Record | T export type AggregationsBucketsPath = string | string[] | Record -export type AggregationsCalendarInterval = 'second' | '1s' | 'minute' | '1m' | 'hour' | '1h' | 'day' | '1d' | 'week' | '1w' | 'month' | '1M' | 'quarter' | '1q' | 'year' | '1Y' +export type AggregationsCalendarInterval = 'second' | '1s' | 'minute' | '1m' | 'hour' | '1h' | 'day' | '1d' | 'week' | '1w' | 'month' | '1M' | 'quarter' | '1q' | 'year' | '1y' export interface AggregationsCardinalityAggregate extends AggregationsAggregateBase { value: long @@ -3147,7 +3161,7 @@ export interface AggregationsCompositeAggregationBase { field?: Field missing_bucket?: boolean missing_order?: AggregationsMissingOrder - script?: Script + script?: Script | string value_type?: AggregationsValueType order?: SortOrder } @@ -3218,7 +3232,7 @@ export interface AggregationsDateHistogramAggregation extends AggregationsBucket offset?: Duration order?: AggregationsAggregateOrder params?: Record - script?: Script + script?: Script | string time_zone?: TimeZone keyed?: boolean } @@ -3259,7 +3273,7 @@ export interface AggregationsDerivativeAggregation extends AggregationsPipelineA export interface AggregationsDiversifiedSamplerAggregation extends AggregationsBucketAggregationBase { execution_hint?: AggregationsSamplerAggregationExecutionHint max_docs_per_value?: 
integer - script?: Script + script?: Script | string shard_size?: integer field?: Field } @@ -3508,7 +3522,7 @@ export interface AggregationsHistogramAggregation extends AggregationsBucketAggr missing?: double offset?: double order?: AggregationsAggregateOrder - script?: Script + script?: Script | string format?: string keyed?: boolean } @@ -3696,7 +3710,7 @@ export interface AggregationsMedianAbsoluteDeviationAggregation extends Aggregat export interface AggregationsMetricAggregationBase { field?: Field missing?: AggregationsMissing - script?: Script + script?: Script | string } export interface AggregationsMinAggregate extends AggregationsSingleMetricAggregateBase { @@ -3850,7 +3864,7 @@ export interface AggregationsRangeAggregation extends AggregationsBucketAggregat field?: Field missing?: integer ranges?: AggregationsAggregationRange[] - script?: Script + script?: Script | string keyed?: boolean format?: string } @@ -3908,7 +3922,7 @@ export interface AggregationsSamplerAggregation extends AggregationsBucketAggreg export type AggregationsSamplerAggregationExecutionHint = 'map' | 'global_ordinals' | 'bytes_hash' export interface AggregationsScriptedHeuristic { - script: Script + script: Script | string } export interface AggregationsScriptedMetricAggregate extends AggregationsAggregateBase { @@ -3916,11 +3930,11 @@ export interface AggregationsScriptedMetricAggregate extends AggregationsAggrega } export interface AggregationsScriptedMetricAggregation extends AggregationsMetricAggregationBase { - combine_script?: Script - init_script?: Script - map_script?: Script + combine_script?: Script | string + init_script?: Script | string + map_script?: Script | string params?: Record - reduce_script?: Script + reduce_script?: Script | string } export interface AggregationsSerialDifferencingAggregation extends AggregationsPipelineAggregationBase { @@ -4133,7 +4147,8 @@ export interface AggregationsTermsAggregation extends AggregationsBucketAggregat missing_bucket?: 
boolean value_type?: string order?: AggregationsAggregateOrder - script?: Script + script?: Script | string + shard_min_doc_count?: long shard_size?: integer show_term_doc_count_error?: boolean size?: integer @@ -4159,7 +4174,7 @@ export interface AggregationsTermsPartition { export interface AggregationsTestPopulation { field: Field - script?: Script + script?: Script | string filter?: QueryDslQueryContainer } @@ -4232,7 +4247,7 @@ export interface AggregationsVariableWidthHistogramAggregation { buckets?: integer shard_size?: integer initial_buffer?: integer - script?: Script + script?: Script | string } export interface AggregationsVariableWidthHistogramBucketKeys extends AggregationsMultiBucketBase { @@ -4256,7 +4271,7 @@ export interface AggregationsWeightedAverageAggregation { export interface AggregationsWeightedAverageValue { field?: Field missing?: double - script?: Script + script?: Script | string } export interface AggregationsWeightedAvgAggregate extends AggregationsSingleMetricAggregateBase { @@ -4304,7 +4319,7 @@ export interface AnalysisCompoundWordTokenFilterBase extends AnalysisTokenFilter export interface AnalysisConditionTokenFilter extends AnalysisTokenFilterBase { type: 'condition' filter: string[] - script: Script + script: Script | string } export interface AnalysisCustomAnalyzer { @@ -4699,7 +4714,7 @@ export interface AnalysisPorterStemTokenFilter extends AnalysisTokenFilterBase { export interface AnalysisPredicateTokenFilter extends AnalysisTokenFilterBase { type: 'predicate_token_filter' - script: Script + script: Script | string } export interface AnalysisRemoveDuplicatesTokenFilter extends AnalysisTokenFilterBase { @@ -4736,7 +4751,7 @@ export type AnalysisSnowballLanguage = 'Armenian' | 'Basque' | 'Catalan' | 'Dani export interface AnalysisSnowballTokenFilter extends AnalysisTokenFilterBase { type: 'snowball' - language: AnalysisSnowballLanguage + language?: AnalysisSnowballLanguage } export interface AnalysisStandardAnalyzer { @@ 
-4983,8 +4998,9 @@ export interface MappingDateRangeProperty extends MappingRangePropertyBase { export interface MappingDenseVectorIndexOptions { type: string - m: integer - ef_construction: integer + m?: integer + ef_construction?: integer + confidence_interval?: float } export interface MappingDenseVectorProperty extends MappingPropertyBase { @@ -5017,7 +5033,7 @@ export interface MappingDynamicProperty extends MappingDocValuesPropertyBase { null_value?: FieldValue boost?: double coerce?: boolean - script?: Script + script?: Script | string on_script_error?: MappingOnScriptError ignore_malformed?: boolean time_series_metric?: MappingTimeSeriesMetricType @@ -5095,7 +5111,7 @@ export interface MappingGeoPointProperty extends MappingDocValuesPropertyBase { null_value?: GeoLocation index?: boolean on_script_error?: MappingOnScriptError - script?: Script + script?: Script | string type: 'geo_point' } @@ -5161,7 +5177,8 @@ export interface MappingIpProperty extends MappingDocValuesPropertyBase { ignore_malformed?: boolean null_value?: string on_script_error?: MappingOnScriptError - script?: Script + script?: Script | string + time_series_dimension?: boolean type: 'ip' } @@ -5180,12 +5197,13 @@ export interface MappingKeywordProperty extends MappingDocValuesPropertyBase { eager_global_ordinals?: boolean index?: boolean index_options?: MappingIndexOptions - script?: Script + script?: Script | string on_script_error?: MappingOnScriptError normalizer?: string norms?: boolean null_value?: string split_queries_on_whitespace?: boolean + time_series_dimension?: boolean type: 'keyword' } @@ -5224,7 +5242,9 @@ export interface MappingNumberPropertyBase extends MappingDocValuesPropertyBase ignore_malformed?: boolean index?: boolean on_script_error?: MappingOnScriptError - script?: Script + script?: Script | string + time_series_metric?: MappingTimeSeriesMetricType + time_series_dimension?: boolean } export interface MappingObjectProperty extends MappingCorePropertyBase { @@ 
-5282,7 +5302,7 @@ export interface MappingRuntimeField { input_field?: Field target_field?: Field target_index?: IndexName - script?: Script + script?: Script | string type: MappingRuntimeFieldType } @@ -5411,6 +5431,7 @@ export interface MappingTypeMapping { runtime?: Record enabled?: boolean subobjects?: boolean + _data_stream_timestamp?: MappingDataStreamTimestamp } export interface MappingUnsignedLongNumberProperty extends MappingNumberPropertyBase { @@ -5424,6 +5445,7 @@ export interface MappingVersionProperty extends MappingDocValuesPropertyBase { export interface MappingWildcardProperty extends MappingDocValuesPropertyBase { type: 'wildcard' + null_value?: string } export interface QueryDslBoolQuery extends QueryDslQueryBase { @@ -5468,28 +5490,22 @@ export interface QueryDslConstantScoreQuery extends QueryDslQueryBase { filter: QueryDslQueryContainer } -export interface QueryDslDateDecayFunctionKeys extends QueryDslDecayFunctionBase { +export interface QueryDslDateDecayFunctionKeys extends QueryDslDecayFunctionBase { } export type QueryDslDateDecayFunction = QueryDslDateDecayFunctionKeys -& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } +& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } export interface QueryDslDateDistanceFeatureQuery extends QueryDslDistanceFeatureQueryBase { } -export interface QueryDslDateRangeQuery extends QueryDslRangeQueryBase { - gt?: DateMath - gte?: DateMath - lt?: DateMath - lte?: DateMath - from?: DateMath | null - to?: DateMath | null +export interface QueryDslDateRangeQuery extends QueryDslRangeQueryBase { format?: DateFormat time_zone?: TimeZone } -export type QueryDslDecayFunction = QueryDslDateDecayFunction | QueryDslNumericDecayFunction | QueryDslGeoDecayFunction +export type QueryDslDecayFunction = QueryDslUntypedDecayFunction | QueryDslDateDecayFunction | QueryDslNumericDecayFunction | QueryDslGeoDecayFunction -export interface QueryDslDecayFunctionBase { +export 
interface QueryDslDecayFunctionBase { multi_value_mode?: QueryDslMultiValueMode } @@ -5505,7 +5521,7 @@ export interface QueryDslDisMaxQuery extends QueryDslQueryBase { tie_breaker?: double } -export type QueryDslDistanceFeatureQuery = QueryDslGeoDistanceFeatureQuery | QueryDslDateDistanceFeatureQuery +export type QueryDslDistanceFeatureQuery = QueryDslUntypedDistanceFeatureQuery | QueryDslGeoDistanceFeatureQuery | QueryDslDateDistanceFeatureQuery export interface QueryDslDistanceFeatureQueryBase extends QueryDslQueryBase { origin: TOrigin @@ -5580,10 +5596,10 @@ export interface QueryDslGeoBoundingBoxQueryKeys extends QueryDslQueryBase { export type QueryDslGeoBoundingBoxQuery = QueryDslGeoBoundingBoxQueryKeys & { [property: string]: GeoBounds | QueryDslGeoExecution | QueryDslGeoValidationMethod | boolean | float | string } -export interface QueryDslGeoDecayFunctionKeys extends QueryDslDecayFunctionBase { +export interface QueryDslGeoDecayFunctionKeys extends QueryDslDecayFunctionBase { } export type QueryDslGeoDecayFunction = QueryDslGeoDecayFunctionKeys -& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } +& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } export interface QueryDslGeoDistanceFeatureQuery extends QueryDslDistanceFeatureQueryBase { } @@ -5676,7 +5692,7 @@ export interface QueryDslIntervalsFilter { not_containing?: QueryDslIntervalsContainer not_overlapping?: QueryDslIntervalsContainer overlapping?: QueryDslIntervalsContainer - script?: Script + script?: Script | string } export interface QueryDslIntervalsFuzzy { @@ -5831,19 +5847,13 @@ export interface QueryDslNestedQuery extends QueryDslQueryBase { score_mode?: QueryDslChildScoreMode } -export interface QueryDslNumberRangeQuery extends QueryDslRangeQueryBase { - gt?: double - gte?: double - lt?: double - lte?: double - from?: double | null - to?: double | null +export interface QueryDslNumberRangeQuery extends QueryDslRangeQueryBase { } -export 
interface QueryDslNumericDecayFunctionKeys extends QueryDslDecayFunctionBase { +export interface QueryDslNumericDecayFunctionKeys extends QueryDslDecayFunctionBase { } export type QueryDslNumericDecayFunction = QueryDslNumericDecayFunctionKeys -& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } +& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } export type QueryDslOperator = 'and' | 'AND' | 'or' | 'OR' @@ -5879,6 +5889,7 @@ export interface QueryDslPinnedQuery extends QueryDslQueryBase { export interface QueryDslPrefixQuery extends QueryDslQueryBase { rewrite?: MultiTermQueryRewrite value: string + case_insensitive?: boolean } export interface QueryDslQueryBase { @@ -5890,11 +5901,12 @@ export interface QueryDslQueryContainer { bool?: QueryDslBoolQuery boosting?: QueryDslBoostingQuery common?: Partial> + combined_fields?: QueryDslCombinedFieldsQuery constant_score?: QueryDslConstantScoreQuery dis_max?: QueryDslDisMaxQuery distance_feature?: QueryDslDistanceFeatureQuery exists?: QueryDslExistsQuery - function_score?: QueryDslFunctionScoreQuery + function_score?: QueryDslFunctionScoreQuery | QueryDslFunctionScoreContainer[] fuzzy?: Partial> geo_bounding_box?: QueryDslGeoBoundingBoxQuery geo_distance?: QueryDslGeoDistanceQuery @@ -5922,9 +5934,10 @@ export interface QueryDslQueryContainer { range?: Partial> rank_feature?: QueryDslRankFeatureQuery regexp?: Partial> - rule_query?: QueryDslRuleQuery + rule?: QueryDslRuleQuery script?: QueryDslScriptQuery script_score?: QueryDslScriptScoreQuery + semantic?: QueryDslSemanticQuery shape?: QueryDslShapeQuery simple_query_string?: QueryDslSimpleQueryStringQuery span_containing?: QueryDslSpanContainingQuery @@ -5936,9 +5949,12 @@ export interface QueryDslQueryContainer { span_or?: QueryDslSpanOrQuery span_term?: Partial> span_within?: QueryDslSpanWithinQuery + sparse_vector?: QueryDslSparseVectorQuery term?: Partial> terms?: QueryDslTermsQuery terms_set?: Partial> + 
text_expansion?: Partial> + weighted_tokens?: Partial> wildcard?: Partial> wrapper?: QueryDslWrapperQuery type?: QueryDslTypeQuery @@ -5977,10 +5993,16 @@ export interface QueryDslRandomScoreFunction { seed?: long | string } -export type QueryDslRangeQuery = QueryDslDateRangeQuery | QueryDslNumberRangeQuery | QueryDslTermsRangeQuery +export type QueryDslRangeQuery = QueryDslUntypedRangeQuery | QueryDslDateRangeQuery | QueryDslNumberRangeQuery | QueryDslTermRangeQuery -export interface QueryDslRangeQueryBase extends QueryDslQueryBase { +export interface QueryDslRangeQueryBase extends QueryDslQueryBase { relation?: QueryDslRangeRelation + gt?: T + gte?: T + lt?: T + lte?: T + from?: T | null + to?: T | null } export type QueryDslRangeRelation = 'within' | 'contains' | 'intersects' @@ -6013,6 +6035,7 @@ export interface QueryDslRankFeatureQuery extends QueryDslQueryBase { } export interface QueryDslRegexpQuery extends QueryDslQueryBase { + case_insensitive?: boolean flags?: string max_determinized_states?: integer rewrite?: MultiTermQueryRewrite @@ -6021,22 +6044,22 @@ export interface QueryDslRegexpQuery extends QueryDslQueryBase { export interface QueryDslRuleQuery extends QueryDslQueryBase { organic: QueryDslQueryContainer - ruleset_id: Id + ruleset_ids: Id[] match_criteria: any } export interface QueryDslScriptQuery extends QueryDslQueryBase { - script: Script + script: Script | string } export interface QueryDslScriptScoreFunction { - script: Script + script: Script | string } export interface QueryDslScriptScoreQuery extends QueryDslQueryBase { min_score?: float query: QueryDslQueryContainer - script: Script + script: Script | string } export interface QueryDslSemanticQuery extends QueryDslQueryBase { @@ -6137,8 +6160,21 @@ export interface QueryDslSpanWithinQuery extends QueryDslQueryBase { little: QueryDslSpanQuery } +export interface QueryDslSparseVectorQuery extends QueryDslQueryBase { + field: Field + query_vector?: Record + inference_id?: Id + query?: 
string + prune?: boolean + pruning_config?: QueryDslTokenPruningConfig +} + export interface QueryDslTermQuery extends QueryDslQueryBase { value: FieldValue + case_insensitive?: boolean +} + +export interface QueryDslTermRangeQuery extends QueryDslRangeQueryBase { } export interface QueryDslTermsLookup { @@ -6155,24 +6191,16 @@ export type QueryDslTermsQuery = QueryDslTermsQueryKeys export type QueryDslTermsQueryField = FieldValue[] | QueryDslTermsLookup -export interface QueryDslTermsRangeQuery extends QueryDslRangeQueryBase { - gt?: string - gte?: string - lt?: string - lte?: string - from?: string | null - to?: string | null -} - export interface QueryDslTermsSetQuery extends QueryDslQueryBase { minimum_should_match_field?: Field - minimum_should_match_script?: Script + minimum_should_match_script?: Script | string terms: string[] } export interface QueryDslTextExpansionQuery extends QueryDslQueryBase { model_id: string model_text: string + pruning_config?: QueryDslTokenPruningConfig } export type QueryDslTextQueryType = 'best_fields' | 'most_fields' | 'cross_fields' | 'phrase' | 'phrase_prefix' | 'bool_prefix' @@ -6187,12 +6215,26 @@ export interface QueryDslTypeQuery extends QueryDslQueryBase { value: string } +export interface QueryDslUntypedDecayFunctionKeys extends QueryDslDecayFunctionBase { +} +export type QueryDslUntypedDecayFunction = QueryDslUntypedDecayFunctionKeys +& { [property: string]: QueryDslDecayPlacement | QueryDslMultiValueMode } + +export interface QueryDslUntypedDistanceFeatureQuery extends QueryDslDistanceFeatureQueryBase { +} + +export interface QueryDslUntypedRangeQuery extends QueryDslRangeQueryBase { + format?: DateFormat + time_zone?: TimeZone +} + export interface QueryDslWeightedTokensQuery extends QueryDslQueryBase { tokens: Record pruning_config?: QueryDslTokenPruningConfig } export interface QueryDslWildcardQuery extends QueryDslQueryBase { + case_insensitive?: boolean rewrite?: MultiTermQueryRewrite value?: string wildcard?: 
string @@ -6311,6 +6353,7 @@ export interface AsyncSearchSubmitRequest extends RequestBase { track_total_hits?: SearchTrackHits indices_boost?: Record[] docvalue_fields?: (QueryDslFieldAndFormat | Field)[] + knn?: KnnSearch | KnnSearch[] min_score?: double post_filter?: QueryDslQueryContainer profile?: boolean @@ -6448,28 +6491,39 @@ export interface CatAliasesRequest extends CatCatRequestBase { export type CatAliasesResponse = CatAliasesAliasesRecord[] export interface CatAllocationAllocationRecord { - shards?: string - s?: string - 'disk.indices'?: ByteSize | null - di?: ByteSize | null - diskIndices?: ByteSize | null - 'disk.used'?: ByteSize | null - du?: ByteSize | null - diskUsed?: ByteSize | null - 'disk.avail'?: ByteSize | null - da?: ByteSize | null - diskAvail?: ByteSize | null - 'disk.total'?: ByteSize | null - dt?: ByteSize | null - diskTotal?: ByteSize | null - 'disk.percent'?: Percentage | null - dp?: Percentage | null - diskPercent?: Percentage | null - host?: Host | null - h?: Host | null - ip?: Ip | null - node?: string - n?: string + shards: string + s: string + 'shards.undesired': string | null + 'write_load.forecast': double | null + wlf: double | null + writeLoadForecast: double | null + 'disk.indices.forecast': ByteSize | null + dif: ByteSize | null + diskIndicesForecast: ByteSize | null + 'disk.indices': ByteSize | null + di: ByteSize | null + diskIndices: ByteSize | null + 'disk.used': ByteSize | null + du: ByteSize | null + diskUsed: ByteSize | null + 'disk.avail': ByteSize | null + da: ByteSize | null + diskAvail: ByteSize | null + 'disk.total': ByteSize | null + dt: ByteSize | null + diskTotal: ByteSize | null + 'disk.percent': Percentage | null + dp: Percentage | null + diskPercent: Percentage | null + host: Host | null + h: Host | null + ip: Ip | null + node: string + n: string + 'node.role': string | null + r: string | null + role: string | null + nodeRole: string | null } export interface CatAllocationRequest extends CatCatRequestBase 
{ @@ -7261,6 +7315,7 @@ export interface CatMlTrainedModelsTrainedModelsRecord { 'data_frame.analysis'?: string dfa?: string dataFrameAnalyticsAnalysis?: string + type?: string } export interface CatNodeattrsNodeAttributesRecord { @@ -8328,6 +8383,7 @@ export interface CcrGetAutoFollowPatternAutoFollowPatternSummary { remote_cluster: string follow_index_pattern?: IndexPattern leader_index_patterns: IndexPatterns + leader_index_exclusion_patterns: IndexPatterns max_outstanding_read_requests: integer } @@ -8449,6 +8505,7 @@ export interface ClusterComponentTemplateSummary { settings?: Record mappings?: MappingTypeMapping aliases?: Record + lifecycle?: IndicesDataStreamLifecycleWithRollover } export interface ClusterAllocationExplainAllocationDecision { @@ -8556,6 +8613,7 @@ export interface ClusterAllocationExplainResponse { remaining_delay_in_millis?: DurationValue shard: integer unassigned_info?: ClusterAllocationExplainUnassignedInformation + note?: string } export interface ClusterAllocationExplainUnassignedInformation { @@ -8595,6 +8653,7 @@ export type ClusterExistsComponentTemplateResponse = boolean export interface ClusterGetComponentTemplateRequest extends RequestBase { name?: Name flat_settings?: boolean + include_defaults?: boolean local?: boolean master_timeout?: Duration } @@ -8943,6 +9002,7 @@ export interface ClusterStatsClusterNodeCount { data: integer data_cold: integer data_content: integer + data_frozen?: integer data_hot: integer data_warm: integer ingest: integer @@ -8958,6 +9018,7 @@ export interface ClusterStatsClusterNodes { count: ClusterStatsClusterNodeCount discovery_types: Record fs: ClusterStatsClusterFileSystem + indexing_pressure: ClusterStatsIndexingPressure ingest: ClusterStatsClusterIngest jvm: ClusterStatsClusterJvm network_types: ClusterStatsClusterNetworkTypes @@ -9028,6 +9089,7 @@ export interface ClusterStatsFieldTypes { indexed_vector_count?: long indexed_vector_dim_max?: long indexed_vector_dim_min?: long + script_count?: 
integer } export interface ClusterStatsFieldTypesMappings { @@ -9074,6 +9136,7 @@ export interface ClusterStatsNodePackagingType { } export interface ClusterStatsOperatingSystemMemoryInfo { + adjusted_total_in_bytes?: long free_in_bytes: long free_percent: integer total_in_bytes: long @@ -9117,14 +9180,15 @@ export interface ClusterStatsStatsResponseBase extends NodesNodesResponseBase { export interface ConnectorConnector { api_key_id?: string + api_key_secret_id?: string configuration: ConnectorConnectorConfiguration custom_scheduling: ConnectorConnectorCustomScheduling description?: string - error?: string + error?: string | null features?: ConnectorConnectorFeatures filtering: ConnectorFilteringConfig[] id?: Id - index_name?: IndexName + index_name?: IndexName | null is_native: boolean language?: string last_access_control_sync_error?: string @@ -9141,8 +9205,9 @@ export interface ConnectorConnector { name?: string pipeline?: ConnectorIngestPipelineParams scheduling: ConnectorSchedulingConfiguration - service_type: string + service_type?: string status: ConnectorConnectorStatus + sync_cursor?: any sync_now: boolean } @@ -9157,11 +9222,11 @@ export interface ConnectorConnectorConfigProperties { placeholder?: string required: boolean sensitive: boolean - tooltip?: string + tooltip?: string | null type: ConnectorConnectorFieldType ui_restrictions: string[] validations: ConnectorValidation[] - value: ScalarValue + value: any } export type ConnectorConnectorConfiguration = Record @@ -9170,9 +9235,8 @@ export type ConnectorConnectorCustomScheduling = Record + last_access_control_sync_error?: string last_access_control_sync_scheduled_at?: DateTime last_access_control_sync_status?: ConnectorSyncStatus last_deleted_document_count?: long last_incremental_sync_scheduled_at?: DateTime last_indexed_document_count?: long - last_seen?: SpecUtilsWithNullValue - last_sync_error?: SpecUtilsWithNullValue + last_seen?: DateTime + last_sync_error?: string last_sync_scheduled_at?: 
DateTime last_sync_status?: ConnectorSyncStatus last_synced?: DateTime + sync_cursor?: any } } @@ -9405,7 +9471,7 @@ export interface ConnectorPostRequest extends RequestBase { /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */ body?: { description?: string - index_name: SpecUtilsWithNullValue + index_name?: IndexName is_native?: boolean language?: string name?: string @@ -9414,15 +9480,16 @@ export interface ConnectorPostRequest extends RequestBase { } export interface ConnectorPostResponse { + result: Result id: Id } export interface ConnectorPutRequest extends RequestBase { - connector_id: Id + connector_id?: Id /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */ body?: { description?: string - index_name: SpecUtilsWithNullValue + index_name?: IndexName is_native?: boolean language?: string name?: string @@ -9432,6 +9499,7 @@ export interface ConnectorPutRequest extends RequestBase { export interface ConnectorPutResponse { result: Result + id: Id } export interface ConnectorSyncJobCancelRequest extends RequestBase { @@ -9459,7 +9527,7 @@ export interface ConnectorSyncJobListRequest extends RequestBase { size?: integer status?: ConnectorSyncStatus connector_id?: Id - job_type?: ConnectorSyncJobType[] + job_type?: ConnectorSyncJobType | ConnectorSyncJobType[] } export interface ConnectorSyncJobListResponse { @@ -9492,8 +9560,8 @@ export interface ConnectorUpdateApiKeyIdRequest extends RequestBase { connector_id: Id /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. 
*/ body?: { - api_key_id?: SpecUtilsWithNullValue - api_key_secret_id?: SpecUtilsWithNullValue + api_key_id?: string + api_key_secret_id?: string } } @@ -9568,7 +9636,7 @@ export interface ConnectorUpdateNameRequest extends RequestBase { connector_id: Id /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */ body?: { - name: string + name?: string description?: string } } @@ -9753,6 +9821,7 @@ export interface EnrichStatsRequest extends RequestBase { export interface EnrichStatsResponse { coordinator_stats: EnrichStatsCoordinatorStats[] executing_policies: EnrichStatsExecutingPolicy[] + cache_stats?: EnrichStatsCacheStats[] } export interface EqlEqlHits { @@ -9774,12 +9843,13 @@ export interface EqlHitsEvent { _index: IndexName _id: Id _source: TEvent + missing?: boolean fields?: Record } export interface EqlHitsSequence { events: EqlHitsEvent[] - join_keys: any[] + join_keys?: any[] } export interface EqlDeleteRequest extends RequestBase { @@ -9829,6 +9899,7 @@ export interface EqlSearchRequest extends RequestBase { size?: uint fields?: QueryDslFieldAndFormat | Field | (QueryDslFieldAndFormat | Field)[] result_position?: EqlSearchResultPosition + runtime_mappings?: MappingRuntimeFields } } @@ -9836,16 +9907,34 @@ export type EqlSearchResponse = EqlEqlSearchResponseBase> } } @@ -10077,6 +10166,7 @@ export interface IlmForceMergeConfiguration { export interface IlmPhase { actions?: IlmActions min_age?: Duration | long + configurations?: IlmConfigurations } export interface IlmPhases { @@ -10268,6 +10358,7 @@ export interface IndicesAliasDefinition { is_write_index?: boolean routing?: string search_routing?: string + is_hidden?: boolean } export interface IndicesCacheQueries { @@ -10283,9 +10374,11 @@ export interface IndicesDataStream { next_generation_managed_by: IndicesManagedBy prefer_ilm: boolean indices: IndicesDataStreamIndex[] + lifecycle?: IndicesDataStreamLifecycleWithRollover name: DataStreamName 
replicated?: boolean status: HealthStatus + system?: boolean template: Name timestamp_field: IndicesDataStreamTimestampField } @@ -10492,6 +10585,7 @@ export interface IndicesIndexState { settings?: IndicesIndexSettings defaults?: IndicesIndexSettings data_stream?: DataStreamName + lifecycle?: IndicesDataStreamLifecycle } export interface IndicesIndexTemplate { @@ -10514,6 +10608,7 @@ export interface IndicesIndexTemplateSummary { aliases?: Record mappings?: MappingTypeMapping settings?: IndicesIndexSettings + lifecycle?: IndicesDataStreamLifecycleWithRollover } export interface IndicesIndexVersioning { @@ -10575,6 +10670,7 @@ export interface IndicesMappingLimitSettingsNestedObjects { export interface IndicesMappingLimitSettingsTotalFields { limit?: long + ignore_dynamic_beyond_limit?: boolean } export interface IndicesMerge { @@ -10671,8 +10767,8 @@ export interface IndicesSettingsSimilarityLmj { export interface IndicesSettingsSimilarityScripted { type: 'scripted' - script: Script - weight_script?: Script + script: Script | string + weight_script?: Script | string } export interface IndicesSlowlogSettings { @@ -11155,6 +11251,7 @@ export interface IndicesGetRequest extends RequestBase { include_defaults?: boolean local?: boolean master_timeout?: Duration + features?: IndicesGetFeatures } export type IndicesGetResponse = Record @@ -11192,6 +11289,7 @@ export interface IndicesGetDataLifecycleResponse { export interface IndicesGetDataStreamRequest extends RequestBase { name?: DataStreamNames expand_wildcards?: ExpandWildcards + include_defaults?: boolean } export interface IndicesGetDataStreamResponse { @@ -11224,6 +11322,7 @@ export interface IndicesGetIndexTemplateRequest extends RequestBase { local?: boolean flat_settings?: boolean master_timeout?: Duration + include_defaults?: boolean } export interface IndicesGetIndexTemplateResponse { @@ -11350,6 +11449,7 @@ export interface IndicesPutIndexTemplateIndexTemplateMapping { aliases?: Record mappings?: 
MappingTypeMapping settings?: IndicesIndexSettings + lifecycle?: IndicesDataStreamLifecycle } export interface IndicesPutIndexTemplateRequest extends RequestBase { @@ -11770,6 +11870,7 @@ export interface IndicesShrinkResponse { export interface IndicesSimulateIndexTemplateRequest extends RequestBase { name: Name master_timeout?: Duration + include_defaults?: boolean } export interface IndicesSimulateIndexTemplateResponse { @@ -11786,6 +11887,7 @@ export interface IndicesSimulateTemplateRequest extends RequestBase { name?: Name create?: boolean master_timeout?: Duration + include_defaults?: boolean /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */ body?: { allow_auto_create?: boolean @@ -11852,6 +11954,7 @@ export interface IndicesStatsIndexStats { translog?: TranslogStats warmer?: WarmerStats bulk?: BulkStats + shard_stats?: IndicesStatsShardsTotalStats } export interface IndicesStatsIndicesStats { @@ -11859,6 +11962,8 @@ export interface IndicesStatsIndicesStats { shards?: Record total?: IndicesStatsIndexStats uuid?: Uuid + health?: HealthStatus + status?: IndicesStatsIndexMetadataState } export interface IndicesStatsMappingStats { @@ -11971,6 +12076,7 @@ export interface IndicesStatsShardStats { translog?: TranslogStats warmer?: WarmerStats bulk?: BulkStats + shards?: Record shard_stats?: IndicesStatsShardsTotalStats indices?: IndicesStatsIndicesStats } @@ -12388,41 +12494,41 @@ export interface IngestProcessorBase { } export interface IngestProcessorContainer { - attachment?: IngestAttachmentProcessor append?: IngestAppendProcessor - csv?: IngestCsvProcessor + attachment?: IngestAttachmentProcessor + bytes?: IngestBytesProcessor + circle?: IngestCircleProcessor convert?: IngestConvertProcessor + csv?: IngestCsvProcessor date?: IngestDateProcessor date_index_name?: IngestDateIndexNameProcessor + dissect?: IngestDissectProcessor dot_expander?: IngestDotExpanderProcessor + drop?: IngestDropProcessor 
enrich?: IngestEnrichProcessor fail?: IngestFailProcessor foreach?: IngestForeachProcessor - json?: IngestJsonProcessor - user_agent?: IngestUserAgentProcessor - kv?: IngestKeyValueProcessor geoip?: IngestGeoIpProcessor grok?: IngestGrokProcessor gsub?: IngestGsubProcessor + inference?: IngestInferenceProcessor join?: IngestJoinProcessor + json?: IngestJsonProcessor + kv?: IngestKeyValueProcessor lowercase?: IngestLowercaseProcessor + pipeline?: IngestPipelineProcessor remove?: IngestRemoveProcessor rename?: IngestRenameProcessor reroute?: IngestRerouteProcessor script?: IngestScriptProcessor set?: IngestSetProcessor + set_security_user?: IngestSetSecurityUserProcessor sort?: IngestSortProcessor split?: IngestSplitProcessor trim?: IngestTrimProcessor uppercase?: IngestUppercaseProcessor urldecode?: IngestUrlDecodeProcessor - bytes?: IngestBytesProcessor - dissect?: IngestDissectProcessor - set_security_user?: IngestSetSecurityUserProcessor - pipeline?: IngestPipelineProcessor - drop?: IngestDropProcessor - circle?: IngestCircleProcessor - inference?: IngestInferenceProcessor + user_agent?: IngestUserAgentProcessor } export interface IngestRemoveProcessor extends IngestProcessorBase { @@ -13465,6 +13571,14 @@ export type MlInclude = 'definition' | 'feature_importance_baseline' | 'hyperpar export interface MlInferenceConfigCreateContainer { regression?: MlRegressionInferenceOptions classification?: MlClassificationInferenceOptions + text_classification?: MlTextClassificationInferenceOptions + zero_shot_classification?: MlZeroShotClassificationInferenceOptions + fill_mask?: MlFillMaskInferenceOptions + ner?: MlNerInferenceOptions + pass_through?: MlPassThroughInferenceOptions + text_embedding?: MlTextEmbeddingInferenceOptions + text_expansion?: MlTextExpansionInferenceOptions + question_answering?: MlQuestionAnsweringInferenceOptions } export interface MlInferenceConfigUpdateContainer { @@ -13608,7 +13722,9 @@ export interface MlJobTimingStats { export type 
MlMemoryStatus = 'ok' | 'soft_limit' | 'hard_limit' export interface MlModelPlotConfig { + annotations_enabled?: boolean enabled?: boolean + terms?: Field } export interface MlModelSizeStats { @@ -13816,6 +13932,8 @@ export interface MlTimingStats { export interface MlTokenizationConfigContainer { bert?: MlNlpBertTokenizationConfig + mpnet?: MlNlpBertTokenizationConfig + roberta?: MlNlpRobertaTokenizationConfig } export type MlTokenizationTruncate = 'first' | 'second' | 'none' @@ -13862,6 +13980,7 @@ export interface MlTrainedModelAssignmentTaskParameters { model_bytes: integer model_id: Id deployment_id: Id + cache_size: ByteSize number_of_allocations: integer priority: MlTrainingPriority queue_capacity: integer @@ -13981,7 +14100,7 @@ export interface MlTrainedModelPrefixStrings { export interface MlTrainedModelSizeStats { model_size_bytes: ByteSize - required_native_memory_bytes: integer + required_native_memory_bytes: ByteSize } export interface MlTrainedModelStats { @@ -14807,6 +14926,8 @@ export interface MlPutDataFrameAnalyticsRequest extends RequestBase { max_num_threads?: integer model_memory_limit?: string source: MlDataframeAnalyticsSource + headers?: HttpHeaders + version?: VersionString } } @@ -14848,6 +14969,7 @@ export interface MlPutDatafeedRequest extends RequestBase { runtime_mappings?: MappingRuntimeFields script_fields?: Record scroll_size?: integer + headers?: HttpHeaders } } @@ -14972,6 +15094,8 @@ export interface MlPutTrainedModelPreprocessor { export interface MlPutTrainedModelRequest extends RequestBase { model_id: Id + defer_definition_decompression?: boolean + wait_for_completion?: boolean /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. 
*/ body?: { compressed_definition?: string @@ -14984,6 +15108,7 @@ export interface MlPutTrainedModelRequest extends RequestBase { model_size_bytes?: long platform_architecture?: string tags?: string[] + prefix_strings?: MlTrainedModelPrefixStrings } } @@ -15051,6 +15176,8 @@ export interface MlPutTrainedModelVocabularyRequest extends RequestBase { /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */ body?: { vocabulary: string[] + merges?: string[] + scores?: double[] } } @@ -16037,6 +16164,7 @@ export interface NodesInfoNodeInfoSettingsCluster { routing?: IndicesIndexRouting election: NodesInfoNodeInfoSettingsClusterElection initial_master_nodes?: string[] + deprecation_indexing?: NodesInfoDeprecationIndexing } export interface NodesInfoNodeInfoSettingsClusterElection { @@ -16284,69 +16412,101 @@ export interface NodesUsageResponseBase extends NodesNodesResponseBase { nodes: Record } -export interface QueryRulesetQueryRule { +export interface QueryRulesQueryRule { rule_id: Id - type: QueryRulesetQueryRuleType - criteria: QueryRulesetQueryRuleCriteria[] - actions: QueryRulesetQueryRuleActions + type: QueryRulesQueryRuleType + criteria: QueryRulesQueryRuleCriteria | QueryRulesQueryRuleCriteria[] + actions: QueryRulesQueryRuleActions + priority?: integer } -export interface QueryRulesetQueryRuleActions { +export interface QueryRulesQueryRuleActions { ids?: Id[] docs?: QueryDslPinnedDoc[] } -export interface QueryRulesetQueryRuleCriteria { - type: QueryRulesetQueryRuleCriteriaType - metadata: string +export interface QueryRulesQueryRuleCriteria { + type: QueryRulesQueryRuleCriteriaType + metadata?: string values?: any[] } -export type QueryRulesetQueryRuleCriteriaType = 'global' | 'exact' | 'exact_fuzzy' | 'prefix' | 'suffix' | 'contains' | 'lt' | 'lte' | 'gt' | 'gte' +export type QueryRulesQueryRuleCriteriaType = 'global' | 'exact' | 'exact_fuzzy' | 'fuzzy' | 'prefix' | 'suffix' | 'contains' | 'lt' | 'lte' | 
'gt' | 'gte' | 'always' -export type QueryRulesetQueryRuleType = 'pinned' +export type QueryRulesQueryRuleType = 'pinned' -export interface QueryRulesetQueryRuleset { +export interface QueryRulesQueryRuleset { ruleset_id: Id - rules: QueryRulesetQueryRule[] + rules: QueryRulesQueryRule[] } -export interface QueryRulesetDeleteRequest extends RequestBase { +export interface QueryRulesDeleteRuleRequest extends RequestBase { ruleset_id: Id + rule_id: Id +} + +export type QueryRulesDeleteRuleResponse = AcknowledgedResponseBase + +export interface QueryRulesDeleteRulesetRequest extends RequestBase { + ruleset_id: Id +} + +export type QueryRulesDeleteRulesetResponse = AcknowledgedResponseBase + +export interface QueryRulesGetRuleRequest extends RequestBase { + ruleset_id: Id + rule_id: Id } -export type QueryRulesetDeleteResponse = AcknowledgedResponseBase +export type QueryRulesGetRuleResponse = QueryRulesQueryRule -export interface QueryRulesetGetRequest extends RequestBase { +export interface QueryRulesGetRulesetRequest extends RequestBase { ruleset_id: Id } -export type QueryRulesetGetResponse = QueryRulesetQueryRuleset +export type QueryRulesGetRulesetResponse = QueryRulesQueryRuleset -export interface QueryRulesetListQueryRulesetListItem { +export interface QueryRulesListRulesetsQueryRulesetListItem { ruleset_id: Id - rules_count: integer + rule_total_count: integer + rule_criteria_types_counts: Record } -export interface QueryRulesetListRequest extends RequestBase { +export interface QueryRulesListRulesetsRequest extends RequestBase { from?: integer size?: integer } -export interface QueryRulesetListResponse { +export interface QueryRulesListRulesetsResponse { count: long - results: QueryRulesetListQueryRulesetListItem[] + results: QueryRulesListRulesetsQueryRulesetListItem[] +} + +export interface QueryRulesPutRuleRequest extends RequestBase { + ruleset_id: Id + rule_id: Id + /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to 
the top level object. */ + body?: { + type: QueryRulesQueryRuleType + criteria: QueryRulesQueryRuleCriteria | QueryRulesQueryRuleCriteria[] + actions: QueryRulesQueryRuleActions + priority?: integer + } +} + +export interface QueryRulesPutRuleResponse { + result: Result } -export interface QueryRulesetPutRequest extends RequestBase { +export interface QueryRulesPutRulesetRequest extends RequestBase { ruleset_id: Id /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */ body?: { - rules: QueryRulesetQueryRule[] + rules: QueryRulesQueryRule | QueryRulesQueryRule[] } } -export interface QueryRulesetPutResponse { +export interface QueryRulesPutRulesetResponse { result: Result } @@ -16560,7 +16720,7 @@ export interface SearchApplicationSearchApplication { } export interface SearchApplicationSearchApplicationTemplate { - script: InlineScript | string + script: Script | string } export interface SearchApplicationDeleteRequest extends RequestBase { @@ -16628,6 +16788,7 @@ export type SearchApplicationPutBehavioralAnalyticsResponse = SearchApplicationP export interface SearchApplicationSearchRequest extends RequestBase { name: Name + typed_keys?: boolean /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. 
*/ body?: { params?: Record @@ -16715,8 +16876,12 @@ export interface SecurityApiKey { invalidated?: boolean name: Name realm?: string + realm_type?: string username?: Username + profile_uid?: string + metadata?: Metadata role_descriptors?: Record + limited_by?: Record[] _sort?: SortResults } @@ -16730,11 +16895,16 @@ export interface SecurityApplicationPrivileges { resources: string[] } +export interface SecurityBulkError { + count: integer + details: Record +} + export interface SecurityClusterNode { name: Name } -export type SecurityClusterPrivilege = 'all' | 'cancel_task' | 'create_snapshot' | 'cross_cluster_replication' | 'cross_cluster_search' | 'delegate_pki' | 'grant_api_key' | 'manage' | 'manage_api_key' | 'manage_autoscaling' | 'manage_behavioral_analytics' | 'manage_ccr' | 'manage_data_frame_transforms' | 'manage_data_stream_global_retention' | 'manage_enrich' | 'manage_ilm' | 'manage_index_templates' | 'manage_inference' | 'manage_ingest_pipelines' | 'manage_logstash_pipelines' | 'manage_ml' | 'manage_oidc' | 'manage_own_api_key' | 'manage_pipeline' | 'manage_rollup' | 'manage_saml' | 'manage_search_application' | 'manage_search_query_rules' | 'manage_search_synonyms' | 'manage_security' | 'manage_service_account' | 'manage_slm' | 'manage_token' | 'manage_transform' | 'manage_user_profile' | 'manage_watcher' | 'monitor' | 'monitor_data_frame_transforms' | 'monitor_data_stream_global_retention' | 'monitor_enrich' | 'monitor_inference' | 'monitor_ml' | 'monitor_rollup' | 'monitor_snapshot' | 'monitor_text_structure' | 'monitor_transform' | 'monitor_watcher' | 'none' | 'post_behavioral_analytics_event' | 'read_ccr' | 'read_connector_secrets' | 'read_fleet_secrets' | 'read_ilm' | 'read_pipeline' | 'read_security' | 'read_slm' | 'transport_client' | 'write_connector_secrets' | 'write_fleet_secrets' | string +export type SecurityClusterPrivilege = 'all' | 'cancel_task' | 'create_snapshot' | 'cross_cluster_replication' | 'cross_cluster_search' | 'delegate_pki' 
| 'grant_api_key' | 'manage' | 'manage_api_key' | 'manage_autoscaling' | 'manage_behavioral_analytics' | 'manage_ccr' | 'manage_data_frame_transforms' | 'manage_data_stream_global_retention' | 'manage_enrich' | 'manage_ilm' | 'manage_index_templates' | 'manage_inference' | 'manage_ingest_pipelines' | 'manage_logstash_pipelines' | 'manage_ml' | 'manage_oidc' | 'manage_own_api_key' | 'manage_pipeline' | 'manage_rollup' | 'manage_saml' | 'manage_search_application' | 'manage_search_query_rules' | 'manage_search_synonyms' | 'manage_security' | 'manage_service_account' | 'manage_slm' | 'manage_token' | 'manage_transform' | 'manage_user_profile' | 'manage_watcher' | 'monitor' | 'monitor_data_frame_transforms' | 'monitor_data_stream_global_retention' | 'monitor_enrich' | 'monitor_inference' | 'monitor_ml' | 'monitor_rollup' | 'monitor_snapshot' | 'monitor_text_structure' | 'monitor_transform' | 'monitor_watcher' | 'none' | 'post_behavioral_analytics_event' | 'read_ccr' | 'read_fleet_secrets' | 'read_ilm' | 'read_pipeline' | 'read_security' | 'read_slm' | 'transport_client' | 'write_connector_secrets' | 'write_fleet_secrets' | string export interface SecurityCreatedStatus { created: boolean @@ -16778,24 +16948,24 @@ export interface SecurityRealmInfo { } export interface SecurityRoleDescriptor { - cluster?: string[] + cluster?: SecurityClusterPrivilege[] indices?: SecurityIndicesPrivileges[] index?: SecurityIndicesPrivileges[] - global?: SecurityGlobalPrivilege[] | SecurityGlobalPrivilege applications?: SecurityApplicationPrivileges[] metadata?: Metadata run_as?: string[] + description?: string transient_metadata?: Record } export interface SecurityRoleDescriptorRead { - cluster: string[] + cluster: SecurityClusterPrivilege[] indices: SecurityIndicesPrivileges[] index: SecurityIndicesPrivileges[] - global?: SecurityGlobalPrivilege[] | SecurityGlobalPrivilege applications?: SecurityApplicationPrivileges[] metadata?: Metadata run_as?: string[] + description?: string 
transient_metadata?: Record } @@ -16816,22 +16986,22 @@ export interface SecurityRoleMappingRule { export interface SecurityRoleTemplate { format?: SecurityTemplateFormat - template: Script + template: Script | string } export type SecurityRoleTemplateInlineQuery = string | QueryDslQueryContainer -export interface SecurityRoleTemplateInlineScript extends ScriptBase { - lang?: ScriptLanguage - options?: Record - source: SecurityRoleTemplateInlineQuery -} - export interface SecurityRoleTemplateQuery { - template?: SecurityRoleTemplateScript + template?: SecurityRoleTemplateScript | SecurityRoleTemplateInlineQuery } -export type SecurityRoleTemplateScript = SecurityRoleTemplateInlineScript | SecurityRoleTemplateInlineQuery | StoredScriptId +export interface SecurityRoleTemplateScript { + source?: SecurityRoleTemplateInlineQuery + id?: Id + params?: Record + lang?: ScriptLanguage + options?: Record +} export type SecurityTemplateFormat = 'string' | 'json' @@ -16908,10 +17078,41 @@ export interface SecurityAuthenticateResponse { username: Username enabled: boolean authentication_type: string + token?: SecurityAuthenticateToken } export interface SecurityAuthenticateToken { name: Name + type?: string +} + +export interface SecurityBulkDeleteRoleRequest extends RequestBase { + refresh?: Refresh + /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */ + body?: { + names: string[] + } +} + +export interface SecurityBulkDeleteRoleResponse { + deleted?: string[] + not_found?: string[] + errors?: SecurityBulkError +} + +export interface SecurityBulkPutRoleRequest extends RequestBase { + refresh?: Refresh + /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. 
*/ + body?: { + roles: Record + } +} + +export interface SecurityBulkPutRoleResponse { + created?: string[] + updated?: string[] + noop?: string[] + errors?: SecurityBulkError } export interface SecurityChangePasswordRequest extends RequestBase { @@ -16987,6 +17188,7 @@ export interface SecurityCreateApiKeyRequest extends RequestBase { expiration?: Duration name?: Name role_descriptors?: Record + metadata?: Metadata } } @@ -16995,6 +17197,7 @@ export interface SecurityCreateApiKeyResponse { expiration?: long id: Id name: Name + encoded: string } export interface SecurityCreateServiceTokenRequest extends RequestBase { @@ -17125,6 +17328,9 @@ export interface SecurityGetApiKeyRequest extends RequestBase { owner?: boolean realm_name?: Name username?: Username + with_limited_by?: boolean + active_only?: boolean + with_profile_uid?: boolean } export interface SecurityGetApiKeyResponse { @@ -17160,6 +17366,7 @@ export interface SecurityGetRoleRole { transient_metadata?: Record applications: SecurityApplicationPrivileges[] role_templates?: SecurityRoleTemplate[] + global?: Record>> } export interface SecurityGetRoleMappingRequest extends RequestBase { @@ -17243,6 +17450,7 @@ export interface SecurityGetTokenUserRealm { export interface SecurityGetUserRequest extends RequestBase { username?: Username | Username[] + with_profile_uid?: boolean } export type SecurityGetUserResponse = Record @@ -17426,6 +17634,7 @@ export interface SecurityPutRoleRequest extends RequestBase { indices?: SecurityIndicesPrivileges[] metadata?: Metadata run_as?: string[] + description?: string transient_metadata?: Record } } @@ -17512,6 +17721,9 @@ export interface SecurityQueryApiKeysApiKeyQueryContainer { } export interface SecurityQueryApiKeysRequest extends RequestBase { + with_limited_by?: boolean + with_profile_uid?: boolean + typed_keys?: boolean /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. 
*/ body?: { aggregations?: Record @@ -17532,6 +17744,78 @@ export interface SecurityQueryApiKeysResponse { aggregations?: Record } +export interface SecurityQueryRoleQueryRole extends SecurityRoleDescriptor { + _sort?: SortResults + name: string +} + +export interface SecurityQueryRoleRequest extends RequestBase { + /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */ + body?: { + query?: SecurityQueryRoleRoleQueryContainer + from?: integer + sort?: Sort + size?: integer + search_after?: SortResults + } +} + +export interface SecurityQueryRoleResponse { + total: integer + count: integer + roles: SecurityQueryRoleQueryRole[] +} + +export interface SecurityQueryRoleRoleQueryContainer { + bool?: QueryDslBoolQuery + exists?: QueryDslExistsQuery + ids?: QueryDslIdsQuery + match?: Partial> + match_all?: QueryDslMatchAllQuery + prefix?: Partial> + range?: Partial> + simple_query_string?: QueryDslSimpleQueryStringQuery + term?: Partial> + terms?: QueryDslTermsQuery + wildcard?: Partial> +} + +export interface SecurityQueryUserQueryUser extends SecurityUser { + _sort?: SortResults +} + +export interface SecurityQueryUserRequest extends RequestBase { + with_profile_uid?: boolean + /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. 
*/ + body?: { + query?: SecurityQueryUserUserQueryContainer + from?: integer + sort?: Sort + size?: integer + search_after?: SortResults + } +} + +export interface SecurityQueryUserResponse { + total: integer + count: integer + users: SecurityQueryUserQueryUser[] +} + +export interface SecurityQueryUserUserQueryContainer { + ids?: QueryDslIdsQuery + bool?: QueryDslBoolQuery + exists?: QueryDslExistsQuery + match?: Partial> + match_all?: QueryDslMatchAllQuery + prefix?: Partial> + range?: Partial> + simple_query_string?: QueryDslSimpleQueryStringQuery + term?: Partial> + terms?: QueryDslTermsQuery + wildcard?: Partial> +} + export interface SecuritySamlAuthenticateRequest extends RequestBase { /** @deprecated The use of the 'body' key has been deprecated, move the nested keys to the top level object. */ body?: { @@ -18000,8 +18284,10 @@ export interface SnapshotSnapshotInfo { failures?: SnapshotSnapshotShardFailure[] include_global_state?: boolean indices?: IndexName[] + index_details?: Record metadata?: Metadata reason?: string + repository?: Name snapshot: Name shards?: ShardStatistics start_time?: DateTime @@ -18105,6 +18391,7 @@ export interface SnapshotCreateRequest extends RequestBase { } export interface SnapshotCreateResponse { + accepted?: boolean snapshot?: SnapshotSnapshotInfo } @@ -18141,12 +18428,23 @@ export interface SnapshotGetRequest extends RequestBase { ignore_unavailable?: boolean master_timeout?: Duration verbose?: boolean + index_details?: boolean + index_names?: boolean include_repository?: boolean + sort?: SnapshotSnapshotSort + size?: integer + order?: SortOrder + after?: string + offset?: integer + from_sort_value?: string + slm_policy_filter?: Name } export interface SnapshotGetResponse { responses?: SnapshotGetSnapshotResponseItem[] snapshots?: SnapshotSnapshotInfo[] + total: integer + remaining: integer } export interface SnapshotGetSnapshotResponseItem { @@ -18627,11 +18925,13 @@ export interface TransformSettings { deduce_mappings?: 
boolean docs_per_second?: float max_page_search_size?: integer + unattended?: boolean } export interface TransformSource { index: Indices query?: QueryDslQueryContainer + runtime_mappings?: MappingRuntimeFields } export interface TransformSyncContainer { @@ -18736,9 +19036,9 @@ export interface TransformGetTransformStatsTransformIndexerStats { export interface TransformGetTransformStatsTransformProgress { docs_indexed: long docs_processed: long - docs_remaining: long - percent_complete: double - total_docs: long + docs_remaining?: long + percent_complete?: double + total_docs?: long } export interface TransformGetTransformStatsTransformStats { @@ -18896,6 +19196,7 @@ export interface WatcherAction { email?: WatcherEmailAction pagerduty?: WatcherPagerDutyAction slack?: WatcherSlackAction + webhook?: WatcherWebhookAction } export type WatcherActionExecutionMode = 'simulate' | 'force_simulate' | 'execute' | 'force_execute' | 'skip' @@ -19601,6 +19902,7 @@ export interface XpackInfoFeatures { vectors?: XpackInfoFeature voting_only: XpackInfoFeature watcher: XpackInfoFeature + archive: XpackInfoFeature } export interface XpackInfoMinimalLicenseInformation { @@ -19688,6 +19990,7 @@ export interface XpackUsageDataTierPhaseStatistics { export interface XpackUsageDataTiers extends XpackUsageBase { data_warm: XpackUsageDataTierPhaseStatistics + data_frozen?: XpackUsageDataTierPhaseStatistics data_cold: XpackUsageDataTierPhaseStatistics data_content: XpackUsageDataTierPhaseStatistics data_hot: XpackUsageDataTierPhaseStatistics @@ -19821,6 +20124,7 @@ export interface XpackUsageMlDataFrameAnalyticsJobsMemory { export interface XpackUsageMlInference { ingest_processors: Record trained_models: XpackUsageMlInferenceTrainedModels + deployments?: XpackUsageMlInferenceDeployments } export interface XpackUsageMlInferenceDeployments { @@ -19852,6 +20156,7 @@ export interface XpackUsageMlInferenceTrainedModels { estimated_heap_memory_usage_bytes?: MlJobStatistics count?: 
XpackUsageMlInferenceTrainedModelsCount _all: XpackUsageMlCounter + model_size_bytes?: MlJobStatistics } export interface XpackUsageMlInferenceTrainedModelsCount { @@ -19904,6 +20209,7 @@ export interface XpackUsageRequest extends RequestBase { export interface XpackUsageResponse { aggregate_metric: XpackUsageBase analytics: XpackUsageAnalytics + archive: XpackUsageArchive watcher: XpackUsageWatcher ccr: XpackUsageCcr data_frame?: XpackUsageBase