diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index c8f5797767..51045c1ee9 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -4,7 +4,7 @@ steps: provider: "gcp" env: TEST_SUITE: "{{ matrix.suite }}" - STACK_VERSION: 8.14.0-SNAPSHOT + STACK_VERSION: 8.15.0-SNAPSHOT WORKSPACE: /tmp/go-elasticsearch matrix: setup: diff --git a/.github/workflows/test-api.yml b/.github/workflows/test-api.yml index ea153ae3e9..8963b6f240 100644 --- a/.github/workflows/test-api.yml +++ b/.github/workflows/test-api.yml @@ -11,7 +11,7 @@ jobs: test-free: name: Free env: - ELASTICSEARCH_VERSION: elasticsearch:8.14.0-SNAPSHOT + ELASTICSEARCH_VERSION: elasticsearch:8.15.0-SNAPSHOT ELASTICSEARCH_URL: http://localhost:9200 runs-on: ubuntu-latest steps: @@ -43,7 +43,7 @@ jobs: test-platinum: name: Platinum env: - ELASTICSEARCH_VERSION: elasticsearch:8.14.0-SNAPSHOT + ELASTICSEARCH_VERSION: elasticsearch:8.15.0-SNAPSHOT ELASTICSEARCH_URL: https://elastic:elastic@localhost:9200 runs-on: ubuntu-latest steps: diff --git a/Makefile b/Makefile index bbcadb2254..77ed6e728c 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ SHELL := /bin/bash -ELASTICSEARCH_DEFAULT_BUILD_VERSION = "8.14.0-SNAPSHOT" +ELASTICSEARCH_DEFAULT_BUILD_VERSION = "8.15.0-SNAPSHOT" ##@ Test test-unit: ## Run unit tests diff --git a/esapi/api._.go b/esapi/api._.go index e0c025dc1f..797602bfa1 100755 --- a/esapi/api._.go +++ b/esapi/api._.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0 (999dcb8): DO NOT EDIT +// Code generated from specification version 8.15.0 (8abc885): DO NOT EDIT package esapi @@ -48,6 +48,7 @@ type API struct { AutoscalingGetAutoscalingPolicy AutoscalingGetAutoscalingPolicy AutoscalingPutAutoscalingPolicy AutoscalingPutAutoscalingPolicy Bulk Bulk + Capabilities Capabilities ClearScroll ClearScroll ClosePointInTime ClosePointInTime ConnectorCheckIn ConnectorCheckIn @@ -63,6 +64,7 @@ type API struct { ConnectorSecretPut ConnectorSecretPut ConnectorSyncJobCancel ConnectorSyncJobCancel ConnectorSyncJobCheckIn ConnectorSyncJobCheckIn + ConnectorSyncJobClaim ConnectorSyncJobClaim ConnectorSyncJobDelete ConnectorSyncJobDelete ConnectorSyncJobError ConnectorSyncJobError ConnectorSyncJobGet ConnectorSyncJobGet @@ -70,9 +72,12 @@ type API struct { ConnectorSyncJobPost ConnectorSyncJobPost ConnectorSyncJobUpdateStats ConnectorSyncJobUpdateStats ConnectorUpdateAPIKeyDocumentID ConnectorUpdateAPIKeyDocumentID + ConnectorUpdateActiveFiltering ConnectorUpdateActiveFiltering ConnectorUpdateConfiguration ConnectorUpdateConfiguration ConnectorUpdateError ConnectorUpdateError + ConnectorUpdateFeatures ConnectorUpdateFeatures ConnectorUpdateFiltering ConnectorUpdateFiltering + ConnectorUpdateFilteringValidation ConnectorUpdateFilteringValidation ConnectorUpdateIndexName ConnectorUpdateIndexName ConnectorUpdateName ConnectorUpdateName ConnectorUpdateNative ConnectorUpdateNative @@ -121,10 +126,10 @@ type API struct { GraphExplore GraphExplore HealthReport HealthReport Index Index - InferenceDeleteModel InferenceDeleteModel - InferenceGetModel InferenceGetModel + InferenceDelete InferenceDelete + InferenceGet InferenceGet InferenceInference InferenceInference - InferencePutModel InferencePutModel + InferencePut InferencePut Info Info KnnSearch KnnSearch LogstashDeletePipeline LogstashDeletePipeline @@ -139,11 +144,15 @@ type API struct { ProfilingFlamegraph ProfilingFlamegraph ProfilingStacktraces 
ProfilingStacktraces ProfilingStatus ProfilingStatus + ProfilingTopnFunctions ProfilingTopnFunctions PutScript PutScript - QueryRulesetDelete QueryRulesetDelete - QueryRulesetGet QueryRulesetGet - QueryRulesetList QueryRulesetList - QueryRulesetPut QueryRulesetPut + QueryRulesDeleteRule QueryRulesDeleteRule + QueryRulesDeleteRuleset QueryRulesDeleteRuleset + QueryRulesGetRule QueryRulesGetRule + QueryRulesGetRuleset QueryRulesGetRuleset + QueryRulesListRulesets QueryRulesListRulesets + QueryRulesPutRule QueryRulesPutRule + QueryRulesPutRuleset QueryRulesPutRuleset RankEval RankEval Reindex Reindex ReindexRethrottle ReindexRethrottle @@ -195,6 +204,7 @@ type API struct { TextStructureFindStructure TextStructureFindStructure TextStructureTestGrokPattern TextStructureTestGrokPattern TransformDeleteTransform TransformDeleteTransform + TransformGetNodeStats TransformGetNodeStats TransformGetTransform TransformGetTransform TransformGetTransformStats TransformGetTransformStats TransformPreviewTransform TransformPreviewTransform @@ -527,6 +537,7 @@ type Rollup struct { type Security struct { ActivateUserProfile SecurityActivateUserProfile Authenticate SecurityAuthenticate + BulkPutRole SecurityBulkPutRole BulkUpdateAPIKeys SecurityBulkUpdateAPIKeys ChangePassword SecurityChangePassword ClearAPIKeyCache SecurityClearAPIKeyCache @@ -573,6 +584,7 @@ type Security struct { PutRole SecurityPutRole PutUser SecurityPutUser QueryAPIKeys SecurityQueryAPIKeys + QueryRole SecurityQueryRole QueryUser SecurityQueryUser SamlAuthenticate SecuritySamlAuthenticate SamlCompleteLogout SecuritySamlCompleteLogout @@ -633,6 +645,7 @@ func New(t Transport) *API { AutoscalingGetAutoscalingPolicy: newAutoscalingGetAutoscalingPolicyFunc(t), AutoscalingPutAutoscalingPolicy: newAutoscalingPutAutoscalingPolicyFunc(t), Bulk: newBulkFunc(t), + Capabilities: newCapabilitiesFunc(t), ClearScroll: newClearScrollFunc(t), ClosePointInTime: newClosePointInTimeFunc(t), ConnectorCheckIn: newConnectorCheckInFunc(t), @@ -648,6 +661,7 @@ func New(t Transport) *API { ConnectorSecretPut: newConnectorSecretPutFunc(t), ConnectorSyncJobCancel: newConnectorSyncJobCancelFunc(t), ConnectorSyncJobCheckIn: newConnectorSyncJobCheckInFunc(t), + ConnectorSyncJobClaim: newConnectorSyncJobClaimFunc(t), ConnectorSyncJobDelete: newConnectorSyncJobDeleteFunc(t), ConnectorSyncJobError: newConnectorSyncJobErrorFunc(t), ConnectorSyncJobGet: newConnectorSyncJobGetFunc(t), @@ -655,9 +669,12 @@ func New(t Transport) *API { ConnectorSyncJobPost: newConnectorSyncJobPostFunc(t), ConnectorSyncJobUpdateStats: newConnectorSyncJobUpdateStatsFunc(t), ConnectorUpdateAPIKeyDocumentID: newConnectorUpdateAPIKeyDocumentIDFunc(t), + ConnectorUpdateActiveFiltering: newConnectorUpdateActiveFilteringFunc(t), ConnectorUpdateConfiguration: newConnectorUpdateConfigurationFunc(t), ConnectorUpdateError: newConnectorUpdateErrorFunc(t), + ConnectorUpdateFeatures: newConnectorUpdateFeaturesFunc(t), ConnectorUpdateFiltering: newConnectorUpdateFilteringFunc(t), + ConnectorUpdateFilteringValidation: newConnectorUpdateFilteringValidationFunc(t), ConnectorUpdateIndexName: newConnectorUpdateIndexNameFunc(t), ConnectorUpdateName: newConnectorUpdateNameFunc(t), ConnectorUpdateNative: newConnectorUpdateNativeFunc(t), @@ -706,10 +723,10 @@ func New(t Transport) *API { GraphExplore: newGraphExploreFunc(t), HealthReport: newHealthReportFunc(t), Index: newIndexFunc(t), - InferenceDeleteModel: newInferenceDeleteModelFunc(t), - InferenceGetModel: newInferenceGetModelFunc(t), + InferenceDelete: 
newInferenceDeleteFunc(t), + InferenceGet: newInferenceGetFunc(t), InferenceInference: newInferenceInferenceFunc(t), - InferencePutModel: newInferencePutModelFunc(t), + InferencePut: newInferencePutFunc(t), Info: newInfoFunc(t), KnnSearch: newKnnSearchFunc(t), LogstashDeletePipeline: newLogstashDeletePipelineFunc(t), @@ -724,11 +741,15 @@ func New(t Transport) *API { ProfilingFlamegraph: newProfilingFlamegraphFunc(t), ProfilingStacktraces: newProfilingStacktracesFunc(t), ProfilingStatus: newProfilingStatusFunc(t), + ProfilingTopnFunctions: newProfilingTopnFunctionsFunc(t), PutScript: newPutScriptFunc(t), - QueryRulesetDelete: newQueryRulesetDeleteFunc(t), - QueryRulesetGet: newQueryRulesetGetFunc(t), - QueryRulesetList: newQueryRulesetListFunc(t), - QueryRulesetPut: newQueryRulesetPutFunc(t), + QueryRulesDeleteRule: newQueryRulesDeleteRuleFunc(t), + QueryRulesDeleteRuleset: newQueryRulesDeleteRulesetFunc(t), + QueryRulesGetRule: newQueryRulesGetRuleFunc(t), + QueryRulesGetRuleset: newQueryRulesGetRulesetFunc(t), + QueryRulesListRulesets: newQueryRulesListRulesetsFunc(t), + QueryRulesPutRule: newQueryRulesPutRuleFunc(t), + QueryRulesPutRuleset: newQueryRulesPutRulesetFunc(t), RankEval: newRankEvalFunc(t), Reindex: newReindexFunc(t), ReindexRethrottle: newReindexRethrottleFunc(t), @@ -780,6 +801,7 @@ func New(t Transport) *API { TextStructureFindStructure: newTextStructureFindStructureFunc(t), TextStructureTestGrokPattern: newTextStructureTestGrokPatternFunc(t), TransformDeleteTransform: newTransformDeleteTransformFunc(t), + TransformGetNodeStats: newTransformGetNodeStatsFunc(t), TransformGetTransform: newTransformGetTransformFunc(t), TransformGetTransformStats: newTransformGetTransformStatsFunc(t), TransformPreviewTransform: newTransformPreviewTransformFunc(t), @@ -1076,6 +1098,7 @@ func New(t Transport) *API { Security: &Security{ ActivateUserProfile: newSecurityActivateUserProfileFunc(t), Authenticate: newSecurityAuthenticateFunc(t), + BulkPutRole: newSecurityBulkPutRoleFunc(t), BulkUpdateAPIKeys: newSecurityBulkUpdateAPIKeysFunc(t), ChangePassword: newSecurityChangePasswordFunc(t), ClearAPIKeyCache: newSecurityClearAPIKeyCacheFunc(t), @@ -1122,6 +1145,7 @@ func New(t Transport) *API { PutRole: newSecurityPutRoleFunc(t), PutUser: newSecurityPutUserFunc(t), QueryAPIKeys: newSecurityQueryAPIKeysFunc(t), + QueryRole: newSecurityQueryRoleFunc(t), QueryUser: newSecurityQueryUserFunc(t), SamlAuthenticate: newSecuritySamlAuthenticateFunc(t), SamlCompleteLogout: newSecuritySamlCompleteLogoutFunc(t), diff --git a/esapi/api.bulk.go b/esapi/api.bulk.go index 8952d23a64..a590c8dd06 100644 --- a/esapi/api.bulk.go +++ b/esapi/api.bulk.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.capabilities.go b/esapi/api.capabilities.go new file mode 100644 index 0000000000..b9436c1dbe --- /dev/null +++ b/esapi/api.capabilities.go @@ -0,0 +1,266 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.15.0: DO NOT EDIT + +package esapi + +import ( + "context" + "net/http" + "strings" +) + +func newCapabilitiesFunc(t Transport) Capabilities { + return func(o ...func(*CapabilitiesRequest)) (*Response, error) { + var r = CapabilitiesRequest{} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// Capabilities checks if the specified combination of method, API, parameters, and arbitrary capabilities are supported +// +// This API is experimental. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/capabilities.html. +type Capabilities func(o ...func(*CapabilitiesRequest)) (*Response, error) + +// CapabilitiesRequest configures the Capabilities API request. +type CapabilitiesRequest struct { + Capabilities string + Method string + Parameters string + Path string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r CapabilitiesRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "capabilities") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "GET" + + path.Grow(7 + len("/_capabilities")) + path.WriteString("http://") + path.WriteString("/_capabilities") + + params = make(map[string]string) + + if r.Capabilities != "" { + params["capabilities"] = r.Capabilities + } + + if r.Method != "" { + params["method"] = r.Method + } + + if r.Parameters != "" { + params["parameters"] = r.Parameters + } + + if r.Path != "" { + params["path"] = r.Path + } + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), nil) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "capabilities") + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { 
+ instrument.AfterRequest(req, "elasticsearch", "capabilities") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f Capabilities) WithContext(v context.Context) func(*CapabilitiesRequest) { + return func(r *CapabilitiesRequest) { + r.ctx = v + } +} + +// WithCapabilities - comma-separated list of arbitrary api capabilities to check. +func (f Capabilities) WithCapabilities(v string) func(*CapabilitiesRequest) { + return func(r *CapabilitiesRequest) { + r.Capabilities = v + } +} + +// WithMethod - rest method to check. +func (f Capabilities) WithMethod(v string) func(*CapabilitiesRequest) { + return func(r *CapabilitiesRequest) { + r.Method = v + } +} + +// WithParameters - comma-separated list of api parameters to check. +func (f Capabilities) WithParameters(v string) func(*CapabilitiesRequest) { + return func(r *CapabilitiesRequest) { + r.Parameters = v + } +} + +// WithPath - api path to check. +func (f Capabilities) WithPath(v string) func(*CapabilitiesRequest) { + return func(r *CapabilitiesRequest) { + r.Path = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f Capabilities) WithPretty() func(*CapabilitiesRequest) { + return func(r *CapabilitiesRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f Capabilities) WithHuman() func(*CapabilitiesRequest) { + return func(r *CapabilitiesRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f Capabilities) WithErrorTrace() func(*CapabilitiesRequest) { + return func(r *CapabilitiesRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f Capabilities) WithFilterPath(v ...string) func(*CapabilitiesRequest) { + return func(r *CapabilitiesRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f Capabilities) WithHeader(h map[string]string) func(*CapabilitiesRequest) { + return func(r *CapabilitiesRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f Capabilities) WithOpaqueID(s string) func(*CapabilitiesRequest) { + return func(r *CapabilitiesRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.cat.aliases.go b/esapi/api.cat.aliases.go index 28f7993052..ab02ac1c1b 100644 --- a/esapi/api.cat.aliases.go +++ b/esapi/api.cat.aliases.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.allocation.go b/esapi/api.cat.allocation.go index 1e7221ab3a..2302a9b22a 100644 --- a/esapi/api.cat.allocation.go +++ b/esapi/api.cat.allocation.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.component_templates.go b/esapi/api.cat.component_templates.go index 9cdbbed830..48d524f76c 100644 --- a/esapi/api.cat.component_templates.go +++ b/esapi/api.cat.component_templates.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.count.go b/esapi/api.cat.count.go index f96d47edb3..b86e5277f8 100644 --- a/esapi/api.cat.count.go +++ b/esapi/api.cat.count.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.fielddata.go b/esapi/api.cat.fielddata.go index 3dc35dcecb..ab13fc82ff 100644 --- a/esapi/api.cat.fielddata.go +++ b/esapi/api.cat.fielddata.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.health.go b/esapi/api.cat.health.go index 79ade9baac..e4df856b88 100644 --- a/esapi/api.cat.health.go +++ b/esapi/api.cat.health.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.help.go b/esapi/api.cat.help.go index e825ddbc02..f13d25232d 100644 --- a/esapi/api.cat.help.go +++ b/esapi/api.cat.help.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.indices.go b/esapi/api.cat.indices.go index e8afd12a73..bdac9493e0 100644 --- a/esapi/api.cat.indices.go +++ b/esapi/api.cat.indices.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.master.go b/esapi/api.cat.master.go index 5fffb05799..3f088a2e6a 100644 --- a/esapi/api.cat.master.go +++ b/esapi/api.cat.master.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.nodeattrs.go b/esapi/api.cat.nodeattrs.go index 87b4f484fa..6f09f0d9bf 100644 --- a/esapi/api.cat.nodeattrs.go +++ b/esapi/api.cat.nodeattrs.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.nodes.go b/esapi/api.cat.nodes.go index 6754491e78..c16c071115 100644 --- a/esapi/api.cat.nodes.go +++ b/esapi/api.cat.nodes.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.pending_tasks.go b/esapi/api.cat.pending_tasks.go index 4f971bc65f..82df780d73 100644 --- a/esapi/api.cat.pending_tasks.go +++ b/esapi/api.cat.pending_tasks.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.plugins.go b/esapi/api.cat.plugins.go index 65db3f794b..a5a9be7c27 100644 --- a/esapi/api.cat.plugins.go +++ b/esapi/api.cat.plugins.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.recovery.go b/esapi/api.cat.recovery.go index d1b75a53bd..1c35c19a8d 100644 --- a/esapi/api.cat.recovery.go +++ b/esapi/api.cat.recovery.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.repositories.go b/esapi/api.cat.repositories.go index b9234bb53b..84dfe9a20c 100644 --- a/esapi/api.cat.repositories.go +++ b/esapi/api.cat.repositories.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.segments.go b/esapi/api.cat.segments.go index 892d38cbc1..b5415a1ec3 100644 --- a/esapi/api.cat.segments.go +++ b/esapi/api.cat.segments.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.shards.go b/esapi/api.cat.shards.go index bcb270828f..ed41fff055 100644 --- a/esapi/api.cat.shards.go +++ b/esapi/api.cat.shards.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.snapshots.go b/esapi/api.cat.snapshots.go index c572a982b1..f21c95774f 100644 --- a/esapi/api.cat.snapshots.go +++ b/esapi/api.cat.snapshots.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.tasks.go b/esapi/api.cat.tasks.go index 8676cf9078..e4bd78109d 100644 --- a/esapi/api.cat.tasks.go +++ b/esapi/api.cat.tasks.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.templates.go b/esapi/api.cat.templates.go index e967175a15..d3711fc6f7 100644 --- a/esapi/api.cat.templates.go +++ b/esapi/api.cat.templates.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cat.thread_pool.go b/esapi/api.cat.thread_pool.go index 816f6c5a0c..6ad19da78d 100644 --- a/esapi/api.cat.thread_pool.go +++ b/esapi/api.cat.thread_pool.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.clear_scroll.go b/esapi/api.clear_scroll.go index e4331102bc..5e77989c0b 100644 --- a/esapi/api.clear_scroll.go +++ b/esapi/api.clear_scroll.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.allocation_explain.go b/esapi/api.cluster.allocation_explain.go index 49dcd6d60f..9c5635f2b7 100644 --- a/esapi/api.cluster.allocation_explain.go +++ b/esapi/api.cluster.allocation_explain.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -25,6 +25,7 @@ import ( "net/http" "strconv" "strings" + "time" ) func newClusterAllocationExplainFunc(t Transport) ClusterAllocationExplain { @@ -55,6 +56,7 @@ type ClusterAllocationExplainRequest struct { IncludeDiskInfo *bool IncludeYesDecisions *bool + MasterTimeout time.Duration Pretty bool Human bool @@ -101,6 +103,10 @@ func (r ClusterAllocationExplainRequest) Do(providedCtx context.Context, transpo params["include_yes_decisions"] = strconv.FormatBool(*r.IncludeYesDecisions) } + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -207,6 +213,13 @@ func (f ClusterAllocationExplain) WithIncludeYesDecisions(v bool) func(*ClusterA } } +// WithMasterTimeout - timeout for connection to master node. +func (f ClusterAllocationExplain) WithMasterTimeout(v time.Duration) func(*ClusterAllocationExplainRequest) { + return func(r *ClusterAllocationExplainRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. 
func (f ClusterAllocationExplain) WithPretty() func(*ClusterAllocationExplainRequest) { return func(r *ClusterAllocationExplainRequest) { diff --git a/esapi/api.cluster.delete_component_template.go b/esapi/api.cluster.delete_component_template.go index 3f58f06d8d..833b79c3ff 100644 --- a/esapi/api.cluster.delete_component_template.go +++ b/esapi/api.cluster.delete_component_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.delete_voting_config_exclusions.go b/esapi/api.cluster.delete_voting_config_exclusions.go index a8357ac7f7..4d7479d2d4 100644 --- a/esapi/api.cluster.delete_voting_config_exclusions.go +++ b/esapi/api.cluster.delete_voting_config_exclusions.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.exists_component_template.go b/esapi/api.cluster.exists_component_template.go index 4089fde39d..b00cfa6136 100644 --- a/esapi/api.cluster.exists_component_template.go +++ b/esapi/api.cluster.exists_component_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.get_component_template.go b/esapi/api.cluster.get_component_template.go index 77071d58ed..8a7876cfcd 100644 --- a/esapi/api.cluster.get_component_template.go +++ b/esapi/api.cluster.get_component_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.get_settings.go b/esapi/api.cluster.get_settings.go index fd6278d54b..fe025bf8e4 100644 --- a/esapi/api.cluster.get_settings.go +++ b/esapi/api.cluster.get_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.health.go b/esapi/api.cluster.health.go index 744348326c..6ac6406454 100644 --- a/esapi/api.cluster.health.go +++ b/esapi/api.cluster.health.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.info.go b/esapi/api.cluster.info.go index 6ada054f37..d816f86f56 100644 --- a/esapi/api.cluster.info.go +++ b/esapi/api.cluster.info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.pending_tasks.go b/esapi/api.cluster.pending_tasks.go index 58d9db2a39..91aa0e5721 100644 --- a/esapi/api.cluster.pending_tasks.go +++ b/esapi/api.cluster.pending_tasks.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.post_voting_config_exclusions.go b/esapi/api.cluster.post_voting_config_exclusions.go index 6bed21496c..ae27647486 100644 --- a/esapi/api.cluster.post_voting_config_exclusions.go +++ b/esapi/api.cluster.post_voting_config_exclusions.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.put_component_template.go b/esapi/api.cluster.put_component_template.go index 6a2f38114a..91268937f7 100644 --- a/esapi/api.cluster.put_component_template.go +++ b/esapi/api.cluster.put_component_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.put_settings.go b/esapi/api.cluster.put_settings.go index f70f2a7b92..a57953330a 100644 --- a/esapi/api.cluster.put_settings.go +++ b/esapi/api.cluster.put_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.remote_info.go b/esapi/api.cluster.remote_info.go index 9c30bd4ab4..957beeac6b 100644 --- a/esapi/api.cluster.remote_info.go +++ b/esapi/api.cluster.remote_info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.reroute.go b/esapi/api.cluster.reroute.go index c8510e7719..05cadf6218 100644 --- a/esapi/api.cluster.reroute.go +++ b/esapi/api.cluster.reroute.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.state.go b/esapi/api.cluster.state.go index 1926dd30d5..f0acf517b3 100644 --- a/esapi/api.cluster.state.go +++ b/esapi/api.cluster.state.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.cluster.stats.go b/esapi/api.cluster.stats.go index 335cb77003..d9a5c15468 100644 --- a/esapi/api.cluster.stats.go +++ b/esapi/api.cluster.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.check_in.go b/esapi/api.connector.check_in.go index 7d0f66a11a..a3ded928e7 100644 --- a/esapi/api.connector.check_in.go +++ b/esapi/api.connector.check_in.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.delete.go b/esapi/api.connector.delete.go index df9f221f9e..9e071382f4 100644 --- a/esapi/api.connector.delete.go +++ b/esapi/api.connector.delete.go @@ -15,13 +15,14 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi import ( "context" "net/http" + "strconv" "strings" ) @@ -53,6 +54,8 @@ type ConnectorDelete func(connector_id string, o ...func(*ConnectorDeleteRequest type ConnectorDeleteRequest struct { ConnectorID string + DeleteSyncJobs *bool + Pretty bool Human bool ErrorTrace bool @@ -96,6 +99,10 @@ func (r ConnectorDeleteRequest) Do(providedCtx context.Context, transport Transp params = make(map[string]string) + if r.DeleteSyncJobs != nil { + params["delete_sync_jobs"] = strconv.FormatBool(*r.DeleteSyncJobs) + } + if r.Pretty { params["pretty"] = "true" } @@ -174,6 +181,13 @@ func (f ConnectorDelete) WithContext(v context.Context) func(*ConnectorDeleteReq } } +// WithDeleteSyncJobs - determines whether associated sync jobs are also deleted.. +func (f ConnectorDelete) WithDeleteSyncJobs(v bool) func(*ConnectorDeleteRequest) { + return func(r *ConnectorDeleteRequest) { + r.DeleteSyncJobs = &v + } +} + // WithPretty makes the response body pretty-printed. func (f ConnectorDelete) WithPretty() func(*ConnectorDeleteRequest) { return func(r *ConnectorDeleteRequest) { diff --git a/esapi/api.connector.get.go b/esapi/api.connector.get.go index 6e27967228..7048703e9d 100644 --- a/esapi/api.connector.get.go +++ b/esapi/api.connector.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.last_sync.go b/esapi/api.connector.last_sync.go index 9b39d5b153..55ce3ea43f 100644 --- a/esapi/api.connector.last_sync.go +++ b/esapi/api.connector.last_sync.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.list.go b/esapi/api.connector.list.go index 19cc2c8ca9..b9999358a5 100644 --- a/esapi/api.connector.list.go +++ b/esapi/api.connector.list.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.post.go b/esapi/api.connector.post.go index e7db223c61..9246573a0a 100644 --- a/esapi/api.connector.post.go +++ b/esapi/api.connector.post.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -27,8 +27,8 @@ import ( ) func newConnectorPostFunc(t Transport) ConnectorPost { - return func(body io.Reader, o ...func(*ConnectorPostRequest)) (*Response, error) { - var r = ConnectorPostRequest{Body: body} + return func(o ...func(*ConnectorPostRequest)) (*Response, error) { + var r = ConnectorPostRequest{} for _, f := range o { f(&r) } @@ -48,7 +48,7 @@ func newConnectorPostFunc(t Transport) ConnectorPost { // This API is experimental. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/create-connector-api.html. -type ConnectorPost func(body io.Reader, o ...func(*ConnectorPostRequest)) (*Response, error) +type ConnectorPost func(o ...func(*ConnectorPostRequest)) (*Response, error) // ConnectorPostRequest configures the Connector Post API request. type ConnectorPostRequest struct { @@ -176,6 +176,13 @@ func (f ConnectorPost) WithContext(v context.Context) func(*ConnectorPostRequest } } +// WithBody - The connector configuration.. +func (f ConnectorPost) WithBody(v io.Reader) func(*ConnectorPostRequest) { + return func(r *ConnectorPostRequest) { + r.Body = v + } +} + // WithPretty makes the response body pretty-printed. func (f ConnectorPost) WithPretty() func(*ConnectorPostRequest) { return func(r *ConnectorPostRequest) { diff --git a/esapi/api.connector.put.go b/esapi/api.connector.put.go index 3951115c42..1e9e6b5693 100644 --- a/esapi/api.connector.put.go +++ b/esapi/api.connector.put.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -27,8 +27,8 @@ import ( ) func newConnectorPutFunc(t Transport) ConnectorPut { - return func(body io.Reader, connector_id string, o ...func(*ConnectorPutRequest)) (*Response, error) { - var r = ConnectorPutRequest{Body: body, ConnectorID: connector_id} + return func(o ...func(*ConnectorPutRequest)) (*Response, error) { + var r = ConnectorPutRequest{} for _, f := range o { f(&r) } @@ -48,7 +48,7 @@ func newConnectorPutFunc(t Transport) ConnectorPut { // This API is experimental. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/create-connector-api.html. 
-type ConnectorPut func(body io.Reader, connector_id string, o ...func(*ConnectorPutRequest)) (*Response, error) +type ConnectorPut func(o ...func(*ConnectorPutRequest)) (*Response, error) // ConnectorPutRequest configures the Connector Put API request. type ConnectorPutRequest struct { @@ -91,10 +91,12 @@ func (r ConnectorPutRequest) Do(providedCtx context.Context, transport Transport path.WriteString("http://") path.WriteString("/") path.WriteString("_connector") - path.WriteString("/") - path.WriteString(r.ConnectorID) - if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.RecordPathPart(ctx, "connector_id", r.ConnectorID) + if r.ConnectorID != "" { + path.WriteString("/") + path.WriteString(r.ConnectorID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "connector_id", r.ConnectorID) + } } params = make(map[string]string) @@ -184,6 +186,20 @@ func (f ConnectorPut) WithContext(v context.Context) func(*ConnectorPutRequest) } } +// WithBody - The connector configuration.. +func (f ConnectorPut) WithBody(v io.Reader) func(*ConnectorPutRequest) { + return func(r *ConnectorPutRequest) { + r.Body = v + } +} + +// WithConnectorID - the unique identifier of the connector to be created or updated.. +func (f ConnectorPut) WithConnectorID(v string) func(*ConnectorPutRequest) { + return func(r *ConnectorPutRequest) { + r.ConnectorID = v + } +} + // WithPretty makes the response body pretty-printed. func (f ConnectorPut) WithPretty() func(*ConnectorPutRequest) { return func(r *ConnectorPutRequest) { diff --git a/esapi/api.connector_secret.delete.go b/esapi/api.connector.secret_delete.go similarity index 95% rename from esapi/api.connector_secret.delete.go rename to esapi/api.connector.secret_delete.go index 6f578e2e2e..d06aebc357 100644 --- a/esapi/api.connector_secret.delete.go +++ b/esapi/api.connector.secret_delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -73,7 +73,7 @@ func (r ConnectorSecretDeleteRequest) Do(providedCtx context.Context, transport ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "connector_secret.delete") + ctx = instrument.Start(providedCtx, "connector.secret_delete") defer instrument.Close(ctx) } if ctx == nil { @@ -145,11 +145,11 @@ func (r ConnectorSecretDeleteRequest) Do(providedCtx context.Context, transport } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "connector_secret.delete") + instrument.BeforeRequest(req, "connector.secret_delete") } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "connector_secret.delete") + instrument.AfterRequest(req, "elasticsearch", "connector.secret_delete") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { diff --git a/esapi/api.connector_secret.get.go b/esapi/api.connector.secret_get.go similarity index 95% rename from esapi/api.connector_secret.get.go rename to esapi/api.connector.secret_get.go index e9447f9826..145cdd45fe 100644 --- a/esapi/api.connector_secret.get.go +++ b/esapi/api.connector.secret_get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -73,7 +73,7 @@ func (r ConnectorSecretGetRequest) Do(providedCtx context.Context, transport Tra ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "connector_secret.get") + ctx = instrument.Start(providedCtx, "connector.secret_get") defer instrument.Close(ctx) } if ctx == nil { @@ -145,11 +145,11 @@ func (r ConnectorSecretGetRequest) Do(providedCtx context.Context, transport Tra } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "connector_secret.get") + instrument.BeforeRequest(req, "connector.secret_get") } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "connector_secret.get") + instrument.AfterRequest(req, "elasticsearch", "connector.secret_get") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { diff --git a/esapi/api.connector_secret.post.go b/esapi/api.connector.secret_post.go similarity index 93% rename from esapi/api.connector_secret.post.go rename to esapi/api.connector.secret_post.go index f698358b44..6ab485fe48 100644 --- a/esapi/api.connector_secret.post.go +++ b/esapi/api.connector.secret_post.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -74,7 +74,7 @@ func (r ConnectorSecretPostRequest) Do(providedCtx context.Context, transport Tr ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "connector_secret.post") + ctx = instrument.Start(providedCtx, "connector.secret_post") defer instrument.Close(ctx) } if ctx == nil { @@ -142,14 +142,14 @@ func (r ConnectorSecretPostRequest) Do(providedCtx context.Context, transport Tr } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "connector_secret.post") - if reader := instrument.RecordRequestBody(ctx, "connector_secret.post", r.Body); reader != nil { + instrument.BeforeRequest(req, "connector.secret_post") + if reader := instrument.RecordRequestBody(ctx, "connector.secret_post", r.Body); reader != nil { req.Body = reader } } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "connector_secret.post") + instrument.AfterRequest(req, "elasticsearch", "connector.secret_post") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { diff --git a/esapi/api.connector_secret.put.go b/esapi/api.connector.secret_put.go similarity index 94% rename from esapi/api.connector_secret.put.go rename to esapi/api.connector.secret_put.go index 912e3c3211..8660705cbb 100644 --- a/esapi/api.connector_secret.put.go +++ b/esapi/api.connector.secret_put.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -76,7 +76,7 @@ func (r ConnectorSecretPutRequest) Do(providedCtx context.Context, transport Tra ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "connector_secret.put") + ctx = instrument.Start(providedCtx, "connector.secret_put") defer instrument.Close(ctx) } if ctx == nil { @@ -152,14 +152,14 @@ func (r ConnectorSecretPutRequest) Do(providedCtx context.Context, transport Tra } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "connector_secret.put") - if reader := instrument.RecordRequestBody(ctx, "connector_secret.put", r.Body); reader != nil { + instrument.BeforeRequest(req, "connector.secret_put") + if reader := instrument.RecordRequestBody(ctx, "connector.secret_put", r.Body); reader != nil { req.Body = reader } } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "connector_secret.put") + instrument.AfterRequest(req, "elasticsearch", "connector.secret_put") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { diff --git a/esapi/api.connector_sync_job.cancel.go b/esapi/api.connector.sync_job_cancel.go similarity index 95% rename from esapi/api.connector_sync_job.cancel.go rename to esapi/api.connector.sync_job_cancel.go index c680135162..d70e4c80f0 100644 --- a/esapi/api.connector_sync_job.cancel.go +++ b/esapi/api.connector.sync_job_cancel.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -75,7 +75,7 @@ func (r ConnectorSyncJobCancelRequest) Do(providedCtx context.Context, transport ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "connector_sync_job.cancel") + ctx = instrument.Start(providedCtx, "connector.sync_job_cancel") defer instrument.Close(ctx) } if ctx == nil { @@ -149,11 +149,11 @@ func (r ConnectorSyncJobCancelRequest) Do(providedCtx context.Context, transport } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "connector_sync_job.cancel") + instrument.BeforeRequest(req, "connector.sync_job_cancel") } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "connector_sync_job.cancel") + instrument.AfterRequest(req, "elasticsearch", "connector.sync_job_cancel") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { diff --git a/esapi/api.connector_sync_job.check_in.go b/esapi/api.connector.sync_job_check_in.go similarity index 95% rename from esapi/api.connector_sync_job.check_in.go rename to esapi/api.connector.sync_job_check_in.go index 335eec5128..bc55c06a66 100644 --- a/esapi/api.connector_sync_job.check_in.go +++ b/esapi/api.connector.sync_job_check_in.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -75,7 +75,7 @@ func (r ConnectorSyncJobCheckInRequest) Do(providedCtx context.Context, transpor ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "connector_sync_job.check_in") + ctx = instrument.Start(providedCtx, "connector.sync_job_check_in") defer instrument.Close(ctx) } if ctx == nil { @@ -149,11 +149,11 @@ func (r ConnectorSyncJobCheckInRequest) Do(providedCtx context.Context, transpor } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "connector_sync_job.check_in") + instrument.BeforeRequest(req, "connector.sync_job_check_in") } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "connector_sync_job.check_in") + instrument.AfterRequest(req, "elasticsearch", "connector.sync_job_check_in") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { diff --git a/esapi/api.connector.sync_job_claim.go b/esapi/api.connector.sync_job_claim.go new file mode 100644 index 0000000000..237efc2986 --- /dev/null +++ b/esapi/api.connector.sync_job_claim.go @@ -0,0 +1,239 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.15.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newConnectorSyncJobClaimFunc(t Transport) ConnectorSyncJobClaim { + return func(body io.Reader, connector_sync_job_id string, o ...func(*ConnectorSyncJobClaimRequest)) (*Response, error) { + var r = ConnectorSyncJobClaimRequest{Body: body, ConnectorSyncJobID: connector_sync_job_id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorSyncJobClaim claims a connector sync job. +// +// This API is experimental. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/claim-connector-sync-job-api.html. +type ConnectorSyncJobClaim func(body io.Reader, connector_sync_job_id string, o ...func(*ConnectorSyncJobClaimRequest)) (*Response, error) + +// ConnectorSyncJobClaimRequest configures the Connector Sync Job Claim API request. 
+type ConnectorSyncJobClaimRequest struct { + Body io.Reader + + ConnectorSyncJobID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r ConnectorSyncJobClaimRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector.sync_job_claim") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "PUT" + + path.Grow(7 + 1 + len("_connector") + 1 + len("_sync_job") + 1 + len(r.ConnectorSyncJobID) + 1 + len("_claim")) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_connector") + path.WriteString("/") + path.WriteString("_sync_job") + path.WriteString("/") + path.WriteString(r.ConnectorSyncJobID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "connector_sync_job_id", r.ConnectorSyncJobID) + } + path.WriteString("/") + path.WriteString("_claim") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector.sync_job_claim") + if reader := instrument.RecordRequestBody(ctx, "connector.sync_job_claim", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector.sync_job_claim") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ConnectorSyncJobClaim) WithContext(v context.Context) func(*ConnectorSyncJobClaimRequest) { + return func(r *ConnectorSyncJobClaimRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorSyncJobClaim) WithPretty() func(*ConnectorSyncJobClaimRequest) { + return func(r *ConnectorSyncJobClaimRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. 
+func (f ConnectorSyncJobClaim) WithHuman() func(*ConnectorSyncJobClaimRequest) { + return func(r *ConnectorSyncJobClaimRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ConnectorSyncJobClaim) WithErrorTrace() func(*ConnectorSyncJobClaimRequest) { + return func(r *ConnectorSyncJobClaimRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ConnectorSyncJobClaim) WithFilterPath(v ...string) func(*ConnectorSyncJobClaimRequest) { + return func(r *ConnectorSyncJobClaimRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ConnectorSyncJobClaim) WithHeader(h map[string]string) func(*ConnectorSyncJobClaimRequest) { + return func(r *ConnectorSyncJobClaimRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorSyncJobClaim) WithOpaqueID(s string) func(*ConnectorSyncJobClaimRequest) { + return func(r *ConnectorSyncJobClaimRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector_sync_job.delete.go b/esapi/api.connector.sync_job_delete.go similarity index 95% rename from esapi/api.connector_sync_job.delete.go rename to esapi/api.connector.sync_job_delete.go index 0c29dd3645..c089c9a932 100644 --- a/esapi/api.connector_sync_job.delete.go +++ b/esapi/api.connector.sync_job_delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -75,7 +75,7 @@ func (r ConnectorSyncJobDeleteRequest) Do(providedCtx context.Context, transport ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "connector_sync_job.delete") + ctx = instrument.Start(providedCtx, "connector.sync_job_delete") defer instrument.Close(ctx) } if ctx == nil { @@ -147,11 +147,11 @@ func (r ConnectorSyncJobDeleteRequest) Do(providedCtx context.Context, transport } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "connector_sync_job.delete") + instrument.BeforeRequest(req, "connector.sync_job_delete") } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "connector_sync_job.delete") + instrument.AfterRequest(req, "elasticsearch", "connector.sync_job_delete") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { diff --git a/esapi/api.connector_sync_job.error.go b/esapi/api.connector.sync_job_error.go similarity index 94% rename from esapi/api.connector_sync_job.error.go rename to esapi/api.connector.sync_job_error.go index 57e47f9e21..750331c6a3 100644 --- a/esapi/api.connector_sync_job.error.go +++ b/esapi/api.connector.sync_job_error.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
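Note on usage: the ConnectorSyncJobClaim function generated above follows the same functional-options pattern as the rest of esapi, so it is called through the client and configured with its With* methods. A minimal sketch is shown below; the client setup is standard go-elasticsearch, but the sync job ID and the request body fields are illustrative assumptions, not values taken from this diff.

    package main

    import (
        "context"
        "log"
        "strings"

        "github.com/elastic/go-elasticsearch/v8"
    )

    func main() {
        es, err := elasticsearch.NewDefaultClient()
        if err != nil {
            log.Fatalf("error creating the client: %s", err)
        }

        // Hypothetical claim payload; check the claim-connector-sync-job API
        // documentation for the exact fields it accepts.
        body := strings.NewReader(`{"worker_hostname": "connector-worker-1"}`)

        res, err := es.ConnectorSyncJobClaim(
            body,
            "my-sync-job-id", // hypothetical connector_sync_job_id
            es.ConnectorSyncJobClaim.WithContext(context.Background()),
            es.ConnectorSyncJobClaim.WithPretty(),
        )
        if err != nil {
            log.Fatalf("claim request failed: %s", err)
        }
        defer res.Body.Close()
        log.Println(res.Status())
    }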
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -78,7 +78,7 @@ func (r ConnectorSyncJobErrorRequest) Do(providedCtx context.Context, transport ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "connector_sync_job.error") + ctx = instrument.Start(providedCtx, "connector.sync_job_error") defer instrument.Close(ctx) } if ctx == nil { @@ -156,14 +156,14 @@ func (r ConnectorSyncJobErrorRequest) Do(providedCtx context.Context, transport } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "connector_sync_job.error") - if reader := instrument.RecordRequestBody(ctx, "connector_sync_job.error", r.Body); reader != nil { + instrument.BeforeRequest(req, "connector.sync_job_error") + if reader := instrument.RecordRequestBody(ctx, "connector.sync_job_error", r.Body); reader != nil { req.Body = reader } } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "connector_sync_job.error") + instrument.AfterRequest(req, "elasticsearch", "connector.sync_job_error") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { diff --git a/esapi/api.connector_sync_job.get.go b/esapi/api.connector.sync_job_get.go similarity index 95% rename from esapi/api.connector_sync_job.get.go rename to esapi/api.connector.sync_job_get.go index a05bd81170..17d179eb15 100644 --- a/esapi/api.connector_sync_job.get.go +++ b/esapi/api.connector.sync_job_get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -75,7 +75,7 @@ func (r ConnectorSyncJobGetRequest) Do(providedCtx context.Context, transport Tr ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "connector_sync_job.get") + ctx = instrument.Start(providedCtx, "connector.sync_job_get") defer instrument.Close(ctx) } if ctx == nil { @@ -147,11 +147,11 @@ func (r ConnectorSyncJobGetRequest) Do(providedCtx context.Context, transport Tr } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "connector_sync_job.get") + instrument.BeforeRequest(req, "connector.sync_job_get") } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "connector_sync_job.get") + instrument.AfterRequest(req, "elasticsearch", "connector.sync_job_get") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { diff --git a/esapi/api.connector_sync_job.list.go b/esapi/api.connector.sync_job_list.go similarity index 96% rename from esapi/api.connector_sync_job.list.go rename to esapi/api.connector.sync_job_list.go index 5097e1d8ac..bd4059285d 100644 --- a/esapi/api.connector_sync_job.list.go +++ b/esapi/api.connector.sync_job_list.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -80,7 +80,7 @@ func (r ConnectorSyncJobListRequest) Do(providedCtx context.Context, transport T ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "connector_sync_job.list") + ctx = instrument.Start(providedCtx, "connector.sync_job_list") defer instrument.Close(ctx) } if ctx == nil { @@ -164,11 +164,11 @@ func (r ConnectorSyncJobListRequest) Do(providedCtx context.Context, transport T } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "connector_sync_job.list") + instrument.BeforeRequest(req, "connector.sync_job_list") } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "connector_sync_job.list") + instrument.AfterRequest(req, "elasticsearch", "connector.sync_job_list") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { diff --git a/esapi/api.connector_sync_job.post.go b/esapi/api.connector.sync_job_post.go similarity index 93% rename from esapi/api.connector_sync_job.post.go rename to esapi/api.connector.sync_job_post.go index 3eabd41cfa..0ae5b80c7f 100644 --- a/esapi/api.connector_sync_job.post.go +++ b/esapi/api.connector.sync_job_post.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -76,7 +76,7 @@ func (r ConnectorSyncJobPostRequest) Do(providedCtx context.Context, transport T ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "connector_sync_job.post") + ctx = instrument.Start(providedCtx, "connector.sync_job_post") defer instrument.Close(ctx) } if ctx == nil { @@ -144,14 +144,14 @@ func (r ConnectorSyncJobPostRequest) Do(providedCtx context.Context, transport T } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "connector_sync_job.post") - if reader := instrument.RecordRequestBody(ctx, "connector_sync_job.post", r.Body); reader != nil { + instrument.BeforeRequest(req, "connector.sync_job_post") + if reader := instrument.RecordRequestBody(ctx, "connector.sync_job_post", r.Body); reader != nil { req.Body = reader } } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "connector_sync_job.post") + instrument.AfterRequest(req, "elasticsearch", "connector.sync_job_post") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { diff --git a/esapi/api.connector_sync_job.update_stats.go b/esapi/api.connector.sync_job_update_stats.go similarity index 94% rename from esapi/api.connector_sync_job.update_stats.go rename to esapi/api.connector.sync_job_update_stats.go index 63e00f266d..868669f96c 100644 --- a/esapi/api.connector_sync_job.update_stats.go +++ b/esapi/api.connector.sync_job_update_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -78,7 +78,7 @@ func (r ConnectorSyncJobUpdateStatsRequest) Do(providedCtx context.Context, tran ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "connector_sync_job.update_stats") + ctx = instrument.Start(providedCtx, "connector.sync_job_update_stats") defer instrument.Close(ctx) } if ctx == nil { @@ -156,14 +156,14 @@ func (r ConnectorSyncJobUpdateStatsRequest) Do(providedCtx context.Context, tran } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "connector_sync_job.update_stats") - if reader := instrument.RecordRequestBody(ctx, "connector_sync_job.update_stats", r.Body); reader != nil { + instrument.BeforeRequest(req, "connector.sync_job_update_stats") + if reader := instrument.RecordRequestBody(ctx, "connector.sync_job_update_stats", r.Body); reader != nil { req.Body = reader } } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "connector_sync_job.update_stats") + instrument.AfterRequest(req, "elasticsearch", "connector.sync_job_update_stats") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { diff --git a/esapi/api.connector.update_active_filtering.go b/esapi/api.connector.update_active_filtering.go new file mode 100644 index 0000000000..733c29fb4c --- /dev/null +++ b/esapi/api.connector.update_active_filtering.go @@ -0,0 +1,229 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.15.0: DO NOT EDIT + +package esapi + +import ( + "context" + "net/http" + "strings" +) + +func newConnectorUpdateActiveFilteringFunc(t Transport) ConnectorUpdateActiveFiltering { + return func(connector_id string, o ...func(*ConnectorUpdateActiveFilteringRequest)) (*Response, error) { + var r = ConnectorUpdateActiveFilteringRequest{ConnectorID: connector_id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorUpdateActiveFiltering activates the draft filtering rules if they are in a validated state. +// +// This API is experimental. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-filtering-api.html. 
+type ConnectorUpdateActiveFiltering func(connector_id string, o ...func(*ConnectorUpdateActiveFilteringRequest)) (*Response, error) + +// ConnectorUpdateActiveFilteringRequest configures the Connector Update Active Filtering API request. +type ConnectorUpdateActiveFilteringRequest struct { + ConnectorID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r ConnectorUpdateActiveFilteringRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector.update_active_filtering") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "PUT" + + path.Grow(7 + 1 + len("_connector") + 1 + len(r.ConnectorID) + 1 + len("_filtering") + 1 + len("_activate")) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_connector") + path.WriteString("/") + path.WriteString(r.ConnectorID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "connector_id", r.ConnectorID) + } + path.WriteString("/") + path.WriteString("_filtering") + path.WriteString("/") + path.WriteString("_activate") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), nil) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector.update_active_filtering") + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector.update_active_filtering") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ConnectorUpdateActiveFiltering) WithContext(v context.Context) func(*ConnectorUpdateActiveFilteringRequest) { + return func(r *ConnectorUpdateActiveFilteringRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorUpdateActiveFiltering) WithPretty() func(*ConnectorUpdateActiveFilteringRequest) { + return func(r *ConnectorUpdateActiveFilteringRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. 
+func (f ConnectorUpdateActiveFiltering) WithHuman() func(*ConnectorUpdateActiveFilteringRequest) { + return func(r *ConnectorUpdateActiveFilteringRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ConnectorUpdateActiveFiltering) WithErrorTrace() func(*ConnectorUpdateActiveFilteringRequest) { + return func(r *ConnectorUpdateActiveFilteringRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ConnectorUpdateActiveFiltering) WithFilterPath(v ...string) func(*ConnectorUpdateActiveFilteringRequest) { + return func(r *ConnectorUpdateActiveFilteringRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ConnectorUpdateActiveFiltering) WithHeader(h map[string]string) func(*ConnectorUpdateActiveFilteringRequest) { + return func(r *ConnectorUpdateActiveFilteringRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorUpdateActiveFiltering) WithOpaqueID(s string) func(*ConnectorUpdateActiveFilteringRequest) { + return func(r *ConnectorUpdateActiveFilteringRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector.update_api_key_id.go b/esapi/api.connector.update_api_key_id.go index eb9418703e..17eefd3afe 100644 --- a/esapi/api.connector.update_api_key_id.go +++ b/esapi/api.connector.update_api_key_id.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_configuration.go b/esapi/api.connector.update_configuration.go index a3ee023fbf..4f872c684b 100644 --- a/esapi/api.connector.update_configuration.go +++ b/esapi/api.connector.update_configuration.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_error.go b/esapi/api.connector.update_error.go index a37abbdaa8..94f8972a62 100644 --- a/esapi/api.connector.update_error.go +++ b/esapi/api.connector.update_error.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_features.go b/esapi/api.connector.update_features.go new file mode 100644 index 0000000000..48396f41e4 --- /dev/null +++ b/esapi/api.connector.update_features.go @@ -0,0 +1,237 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
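The ConnectorUpdateActiveFiltering API generated above takes no request body: it needs only the connector ID plus any request options, and issues a PUT to /_connector/{connector_id}/_filtering/_activate. A short sketch, reusing the es client from the earlier example and a hypothetical connector ID:

    // Activate the connector's draft filtering rules (they must already be validated).
    res, err := es.ConnectorUpdateActiveFiltering(
        "my-connector-id", // hypothetical connector_id
        es.ConnectorUpdateActiveFiltering.WithContext(context.Background()),
    )
    if err != nil {
        log.Fatalf("activate filtering failed: %s", err)
    }
    defer res.Body.Close()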
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.15.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newConnectorUpdateFeaturesFunc(t Transport) ConnectorUpdateFeatures { + return func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateFeaturesRequest)) (*Response, error) { + var r = ConnectorUpdateFeaturesRequest{Body: body, ConnectorID: connector_id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorUpdateFeatures updates the connector features in the connector document. +// +// This API is experimental. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-features-api.html. +type ConnectorUpdateFeatures func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateFeaturesRequest)) (*Response, error) + +// ConnectorUpdateFeaturesRequest configures the Connector Update Features API request. +type ConnectorUpdateFeaturesRequest struct { + Body io.Reader + + ConnectorID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
+func (r ConnectorUpdateFeaturesRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector.update_features") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "PUT" + + path.Grow(7 + 1 + len("_connector") + 1 + len(r.ConnectorID) + 1 + len("_features")) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_connector") + path.WriteString("/") + path.WriteString(r.ConnectorID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "connector_id", r.ConnectorID) + } + path.WriteString("/") + path.WriteString("_features") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector.update_features") + if reader := instrument.RecordRequestBody(ctx, "connector.update_features", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector.update_features") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f ConnectorUpdateFeatures) WithContext(v context.Context) func(*ConnectorUpdateFeaturesRequest) { + return func(r *ConnectorUpdateFeaturesRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorUpdateFeatures) WithPretty() func(*ConnectorUpdateFeaturesRequest) { + return func(r *ConnectorUpdateFeaturesRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ConnectorUpdateFeatures) WithHuman() func(*ConnectorUpdateFeaturesRequest) { + return func(r *ConnectorUpdateFeaturesRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ConnectorUpdateFeatures) WithErrorTrace() func(*ConnectorUpdateFeaturesRequest) { + return func(r *ConnectorUpdateFeaturesRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. 
+func (f ConnectorUpdateFeatures) WithFilterPath(v ...string) func(*ConnectorUpdateFeaturesRequest) { + return func(r *ConnectorUpdateFeaturesRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ConnectorUpdateFeatures) WithHeader(h map[string]string) func(*ConnectorUpdateFeaturesRequest) { + return func(r *ConnectorUpdateFeaturesRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorUpdateFeatures) WithOpaqueID(s string) func(*ConnectorUpdateFeaturesRequest) { + return func(r *ConnectorUpdateFeaturesRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector.update_filtering.go b/esapi/api.connector.update_filtering.go index e8074202d9..27569f07ca 100644 --- a/esapi/api.connector.update_filtering.go +++ b/esapi/api.connector.update_filtering.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_filtering_validation.go b/esapi/api.connector.update_filtering_validation.go new file mode 100644 index 0000000000..b642fb1dd9 --- /dev/null +++ b/esapi/api.connector.update_filtering_validation.go @@ -0,0 +1,239 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.15.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newConnectorUpdateFilteringValidationFunc(t Transport) ConnectorUpdateFilteringValidation { + return func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateFilteringValidationRequest)) (*Response, error) { + var r = ConnectorUpdateFilteringValidationRequest{Body: body, ConnectorID: connector_id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ConnectorUpdateFilteringValidation updates the validation info of the draft filtering rules. +// +// This API is experimental. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-filtering-api.html. 
+type ConnectorUpdateFilteringValidation func(body io.Reader, connector_id string, o ...func(*ConnectorUpdateFilteringValidationRequest)) (*Response, error) + +// ConnectorUpdateFilteringValidationRequest configures the Connector Update Filtering Validation API request. +type ConnectorUpdateFilteringValidationRequest struct { + Body io.Reader + + ConnectorID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r ConnectorUpdateFilteringValidationRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "connector.update_filtering_validation") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "PUT" + + path.Grow(7 + 1 + len("_connector") + 1 + len(r.ConnectorID) + 1 + len("_filtering") + 1 + len("_validation")) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_connector") + path.WriteString("/") + path.WriteString(r.ConnectorID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "connector_id", r.ConnectorID) + } + path.WriteString("/") + path.WriteString("_filtering") + path.WriteString("/") + path.WriteString("_validation") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "connector.update_filtering_validation") + if reader := instrument.RecordRequestBody(ctx, "connector.update_filtering_validation", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "connector.update_filtering_validation") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. 
+func (f ConnectorUpdateFilteringValidation) WithContext(v context.Context) func(*ConnectorUpdateFilteringValidationRequest) { + return func(r *ConnectorUpdateFilteringValidationRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ConnectorUpdateFilteringValidation) WithPretty() func(*ConnectorUpdateFilteringValidationRequest) { + return func(r *ConnectorUpdateFilteringValidationRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ConnectorUpdateFilteringValidation) WithHuman() func(*ConnectorUpdateFilteringValidationRequest) { + return func(r *ConnectorUpdateFilteringValidationRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ConnectorUpdateFilteringValidation) WithErrorTrace() func(*ConnectorUpdateFilteringValidationRequest) { + return func(r *ConnectorUpdateFilteringValidationRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ConnectorUpdateFilteringValidation) WithFilterPath(v ...string) func(*ConnectorUpdateFilteringValidationRequest) { + return func(r *ConnectorUpdateFilteringValidationRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ConnectorUpdateFilteringValidation) WithHeader(h map[string]string) func(*ConnectorUpdateFilteringValidationRequest) { + return func(r *ConnectorUpdateFilteringValidationRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ConnectorUpdateFilteringValidation) WithOpaqueID(s string) func(*ConnectorUpdateFilteringValidationRequest) { + return func(r *ConnectorUpdateFilteringValidationRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.connector.update_index_name.go b/esapi/api.connector.update_index_name.go index 7855c3eb78..44d5d96fc3 100644 --- a/esapi/api.connector.update_index_name.go +++ b/esapi/api.connector.update_index_name.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_name.go b/esapi/api.connector.update_name.go index 01c770ff03..6349b5faed 100644 --- a/esapi/api.connector.update_name.go +++ b/esapi/api.connector.update_name.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_native.go b/esapi/api.connector.update_native.go index 914024c950..3498f0c2eb 100644 --- a/esapi/api.connector.update_native.go +++ b/esapi/api.connector.update_native.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
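The two new body-carrying connector APIs generated above, ConnectorUpdateFeatures and ConnectorUpdateFilteringValidation, share the same calling shape: a JSON body reader plus the connector ID, sent as a PUT to /_connector/{connector_id}/_features and /_connector/{connector_id}/_filtering/_validation respectively. A combined sketch, again reusing the es client from the first example; the JSON payloads are illustrative assumptions, not taken from this diff:

    // Hypothetical features payload.
    featuresBody := strings.NewReader(`{"features": {"document_level_security": {"enabled": true}}}`)
    res, err := es.ConnectorUpdateFeatures(featuresBody, "my-connector-id")
    if err != nil {
        log.Fatalf("update features failed: %s", err)
    }
    res.Body.Close()

    // Hypothetical validation payload for the draft filtering rules.
    validationBody := strings.NewReader(`{"validation": {"state": "valid", "errors": []}}`)
    res, err = es.ConnectorUpdateFilteringValidation(validationBody, "my-connector-id")
    if err != nil {
        log.Fatalf("update filtering validation failed: %s", err)
    }
    res.Body.Close()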
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_pipeline.go b/esapi/api.connector.update_pipeline.go index 8c565a8a14..cff2345341 100644 --- a/esapi/api.connector.update_pipeline.go +++ b/esapi/api.connector.update_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_scheduling.go b/esapi/api.connector.update_scheduling.go index dba1f573f1..cf876de5f5 100644 --- a/esapi/api.connector.update_scheduling.go +++ b/esapi/api.connector.update_scheduling.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_service_type.go b/esapi/api.connector.update_service_type.go index d0d0abc2d1..5672072f19 100644 --- a/esapi/api.connector.update_service_type.go +++ b/esapi/api.connector.update_service_type.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.connector.update_status.go b/esapi/api.connector.update_status.go index deb3fe0837..1ec5839174 100644 --- a/esapi/api.connector.update_status.go +++ b/esapi/api.connector.update_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.count.go b/esapi/api.count.go index c4fbbd349c..10c3ce68f5 100644 --- a/esapi/api.count.go +++ b/esapi/api.count.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.create.go b/esapi/api.create.go index 8520c24906..54dd540494 100644 --- a/esapi/api.create.go +++ b/esapi/api.create.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.dangling_indices.delete_dangling_index.go b/esapi/api.dangling_indices.delete_dangling_index.go index 73ae365a64..338dd623d4 100644 --- a/esapi/api.dangling_indices.delete_dangling_index.go +++ b/esapi/api.dangling_indices.delete_dangling_index.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.dangling_indices.import_dangling_index.go b/esapi/api.dangling_indices.import_dangling_index.go index b3281b924f..3dc6668bb4 100644 --- a/esapi/api.dangling_indices.import_dangling_index.go +++ b/esapi/api.dangling_indices.import_dangling_index.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.dangling_indices.list_dangling_indices.go b/esapi/api.dangling_indices.list_dangling_indices.go index 381f183118..3adaddf449 100644 --- a/esapi/api.dangling_indices.list_dangling_indices.go +++ b/esapi/api.dangling_indices.list_dangling_indices.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.delete.go b/esapi/api.delete.go index afe2be5b9c..9bbdd2c83c 100644 --- a/esapi/api.delete.go +++ b/esapi/api.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.delete_by_query.go b/esapi/api.delete_by_query.go index 7119786507..e3811877df 100644 --- a/esapi/api.delete_by_query.go +++ b/esapi/api.delete_by_query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.delete_by_query_rethrottle.go b/esapi/api.delete_by_query_rethrottle.go index 4e769033dc..3b5c3a78ec 100644 --- a/esapi/api.delete_by_query_rethrottle.go +++ b/esapi/api.delete_by_query_rethrottle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.delete_script.go b/esapi/api.delete_script.go index 5959779247..1e0b000da7 100644 --- a/esapi/api.delete_script.go +++ b/esapi/api.delete_script.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.exists.go b/esapi/api.exists.go index 1473fba103..405f4cb042 100644 --- a/esapi/api.exists.go +++ b/esapi/api.exists.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.exists_source.go b/esapi/api.exists_source.go index 04486c912a..56e96cb498 100644 --- a/esapi/api.exists_source.go +++ b/esapi/api.exists_source.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.explain.go b/esapi/api.explain.go index ebec6fce29..336d45a8bb 100644 --- a/esapi/api.explain.go +++ b/esapi/api.explain.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.features.get_features.go b/esapi/api.features.get_features.go index 955901cbd5..e916477d1f 100644 --- a/esapi/api.features.get_features.go +++ b/esapi/api.features.get_features.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.features.reset_features.go b/esapi/api.features.reset_features.go index 2f59a56acc..f665d00255 100644 --- a/esapi/api.features.reset_features.go +++ b/esapi/api.features.reset_features.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newFeaturesResetFeaturesFunc(t Transport) FeaturesResetFeatures { @@ -51,6 +52,8 @@ type FeaturesResetFeatures func(o ...func(*FeaturesResetFeaturesRequest)) (*Resp // FeaturesResetFeaturesRequest configures the Features Reset Features API request. type FeaturesResetFeaturesRequest struct { + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -88,6 +91,10 @@ func (r FeaturesResetFeaturesRequest) Do(providedCtx context.Context, transport params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -166,6 +173,13 @@ func (f FeaturesResetFeatures) WithContext(v context.Context) func(*FeaturesRese } } +// WithMasterTimeout - explicit operation timeout for connection to master node. +func (f FeaturesResetFeatures) WithMasterTimeout(v time.Duration) func(*FeaturesResetFeaturesRequest) { + return func(r *FeaturesResetFeaturesRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f FeaturesResetFeatures) WithPretty() func(*FeaturesResetFeaturesRequest) { return func(r *FeaturesResetFeaturesRequest) { diff --git a/esapi/api.field_caps.go b/esapi/api.field_caps.go index 8844d94f80..128dee6a41 100644 --- a/esapi/api.field_caps.go +++ b/esapi/api.field_caps.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
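Besides the version bumps, the features.reset_features change above adds a MasterTimeout request field and a matching WithMasterTimeout option. A small sketch of the new option, reusing the es client from the first example (and assuming a time import):

    // Reset feature state, waiting up to 30 seconds for the master node.
    res, err := es.Features.ResetFeatures(
        es.Features.ResetFeatures.WithMasterTimeout(30*time.Second),
        es.Features.ResetFeatures.WithContext(context.Background()),
    )
    if err != nil {
        log.Fatalf("reset features failed: %s", err)
    }
    defer res.Body.Close()

As the diff shows, the duration is serialized with formatDuration into the master_timeout query parameter, matching how other timeout options are handled in esapi.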
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.fleet.delete_secret.go b/esapi/api.fleet.delete_secret.go index ea600c05a2..14b74425d2 100644 --- a/esapi/api.fleet.delete_secret.go +++ b/esapi/api.fleet.delete_secret.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.fleet.get_secret.go b/esapi/api.fleet.get_secret.go index 9dd92c0bfe..0c270c1ca2 100644 --- a/esapi/api.fleet.get_secret.go +++ b/esapi/api.fleet.get_secret.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.fleet.global_checkpoints.go b/esapi/api.fleet.global_checkpoints.go index 5def5ced62..dbbe9a0040 100644 --- a/esapi/api.fleet.global_checkpoints.go +++ b/esapi/api.fleet.global_checkpoints.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.fleet.msearch.go b/esapi/api.fleet.msearch.go index 8b6e0af20d..aff720ce3b 100644 --- a/esapi/api.fleet.msearch.go +++ b/esapi/api.fleet.msearch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.fleet.post_secret.go b/esapi/api.fleet.post_secret.go index 318ec212cb..b77503d4c2 100644 --- a/esapi/api.fleet.post_secret.go +++ b/esapi/api.fleet.post_secret.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.fleet.search.go b/esapi/api.fleet.search.go index d562518cd2..03e4cc7d9b 100644 --- a/esapi/api.fleet.search.go +++ b/esapi/api.fleet.search.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.get.go b/esapi/api.get.go index 639a90796b..a2fd07e431 100644 --- a/esapi/api.get.go +++ b/esapi/api.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.get_script.go b/esapi/api.get_script.go index 73c867e7ae..77a14ed95b 100644 --- a/esapi/api.get_script.go +++ b/esapi/api.get_script.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.get_script_context.go b/esapi/api.get_script_context.go index d684c48ec0..c9bfa81e4e 100644 --- a/esapi/api.get_script_context.go +++ b/esapi/api.get_script_context.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.get_script_languages.go b/esapi/api.get_script_languages.go index 1ee8de6b2d..fadb88d332 100644 --- a/esapi/api.get_script_languages.go +++ b/esapi/api.get_script_languages.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.get_source.go b/esapi/api.get_source.go index 819e1bf4ce..e5d6f8eaa3 100644 --- a/esapi/api.get_source.go +++ b/esapi/api.get_source.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.health_report.go b/esapi/api.health_report.go index ecf7962a89..ebf2f5636e 100644 --- a/esapi/api.health_report.go +++ b/esapi/api.health_report.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.index.go b/esapi/api.index.go index 346b77bf70..b6c0833feb 100644 --- a/esapi/api.index.go +++ b/esapi/api.index.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.add_block.go b/esapi/api.indices.add_block.go index 784364cbff..20ecf5a0c2 100644 --- a/esapi/api.indices.add_block.go +++ b/esapi/api.indices.add_block.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.analyze.go b/esapi/api.indices.analyze.go index 19076d3115..66072ccd13 100644 --- a/esapi/api.indices.analyze.go +++ b/esapi/api.indices.analyze.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.clear_cache.go b/esapi/api.indices.clear_cache.go index f16d9baa25..0b86ecaebd 100644 --- a/esapi/api.indices.clear_cache.go +++ b/esapi/api.indices.clear_cache.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.clone.go b/esapi/api.indices.clone.go index 7e8e39cad4..556d5f0045 100644 --- a/esapi/api.indices.clone.go +++ b/esapi/api.indices.clone.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.close.go b/esapi/api.indices.close.go index 80c483f390..c971168359 100644 --- a/esapi/api.indices.close.go +++ b/esapi/api.indices.close.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.create.go b/esapi/api.indices.create.go index abc47fec55..7011b8469b 100644 --- a/esapi/api.indices.create.go +++ b/esapi/api.indices.create.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.delete.go b/esapi/api.indices.delete.go index f7a6e28153..d27f1ca7ad 100644 --- a/esapi/api.indices.delete.go +++ b/esapi/api.indices.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.delete_alias.go b/esapi/api.indices.delete_alias.go index 5261165c38..b9d34f743d 100644 --- a/esapi/api.indices.delete_alias.go +++ b/esapi/api.indices.delete_alias.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.delete_data_lifecycle.go b/esapi/api.indices.delete_data_lifecycle.go index 03056e3d8d..b6b4a4e649 100644 --- a/esapi/api.indices.delete_data_lifecycle.go +++ b/esapi/api.indices.delete_data_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.delete_index_template.go b/esapi/api.indices.delete_index_template.go index 66f7eaebcf..329b7702c7 100644 --- a/esapi/api.indices.delete_index_template.go +++ b/esapi/api.indices.delete_index_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.delete_template.go b/esapi/api.indices.delete_template.go index 8ce98d4f4d..8c459302ff 100644 --- a/esapi/api.indices.delete_template.go +++ b/esapi/api.indices.delete_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.disk_usage.go b/esapi/api.indices.disk_usage.go index 5797aefa64..d25e02531e 100644 --- a/esapi/api.indices.disk_usage.go +++ b/esapi/api.indices.disk_usage.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.downsample.go b/esapi/api.indices.downsample.go index ab4027e7a2..595fb79408 100644 --- a/esapi/api.indices.downsample.go +++ b/esapi/api.indices.downsample.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.exists.go b/esapi/api.indices.exists.go index 1b4a9aefad..78912b44ec 100644 --- a/esapi/api.indices.exists.go +++ b/esapi/api.indices.exists.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.exists_alias.go b/esapi/api.indices.exists_alias.go index 4687d76b1b..c44c8c29e8 100644 --- a/esapi/api.indices.exists_alias.go +++ b/esapi/api.indices.exists_alias.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.exists_index_template.go b/esapi/api.indices.exists_index_template.go index 9648c333dd..ef9cc60078 100644 --- a/esapi/api.indices.exists_index_template.go +++ b/esapi/api.indices.exists_index_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.exists_template.go b/esapi/api.indices.exists_template.go index bfb1b66ced..52199e6ab9 100644 --- a/esapi/api.indices.exists_template.go +++ b/esapi/api.indices.exists_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.explain_data_lifecycle.go b/esapi/api.indices.explain_data_lifecycle.go index 3db5673812..39252a3fc3 100644 --- a/esapi/api.indices.explain_data_lifecycle.go +++ b/esapi/api.indices.explain_data_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.field_usage_stats.go b/esapi/api.indices.field_usage_stats.go index ebd34f8707..138615c587 100644 --- a/esapi/api.indices.field_usage_stats.go +++ b/esapi/api.indices.field_usage_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.flush.go b/esapi/api.indices.flush.go index 7035a694c3..4c27e45ab0 100644 --- a/esapi/api.indices.flush.go +++ b/esapi/api.indices.flush.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.forcemerge.go b/esapi/api.indices.forcemerge.go index 33a931ff87..3c2065e12c 100644 --- a/esapi/api.indices.forcemerge.go +++ b/esapi/api.indices.forcemerge.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get.go b/esapi/api.indices.get.go index d3c18ccde2..34be93e374 100644 --- a/esapi/api.indices.get.go +++ b/esapi/api.indices.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_alias.go b/esapi/api.indices.get_alias.go index b5814c9fb5..aa9e704467 100644 --- a/esapi/api.indices.get_alias.go +++ b/esapi/api.indices.get_alias.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_data_lifecycle.go b/esapi/api.indices.get_data_lifecycle.go index 8210a8a697..abb3eb8d82 100644 --- a/esapi/api.indices.get_data_lifecycle.go +++ b/esapi/api.indices.get_data_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_field_mapping.go b/esapi/api.indices.get_field_mapping.go index 140a89be9e..533c7f361b 100644 --- a/esapi/api.indices.get_field_mapping.go +++ b/esapi/api.indices.get_field_mapping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_index_template.go b/esapi/api.indices.get_index_template.go index 57af125158..423f2e2c5f 100644 --- a/esapi/api.indices.get_index_template.go +++ b/esapi/api.indices.get_index_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_mapping.go b/esapi/api.indices.get_mapping.go index 9de47c3f1f..b9b386c5c0 100644 --- a/esapi/api.indices.get_mapping.go +++ b/esapi/api.indices.get_mapping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_settings.go b/esapi/api.indices.get_settings.go index 7ee9135489..0d7c5ca1bb 100644 --- a/esapi/api.indices.get_settings.go +++ b/esapi/api.indices.get_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.get_template.go b/esapi/api.indices.get_template.go index f3642e04e8..8d8fd41522 100644 --- a/esapi/api.indices.get_template.go +++ b/esapi/api.indices.get_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.modify_data_stream.go b/esapi/api.indices.modify_data_stream.go index 8e3a5bdeb9..6a502150f9 100644 --- a/esapi/api.indices.modify_data_stream.go +++ b/esapi/api.indices.modify_data_stream.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.open.go b/esapi/api.indices.open.go index 58880388f5..e4e7a08aec 100644 --- a/esapi/api.indices.open.go +++ b/esapi/api.indices.open.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.put_alias.go b/esapi/api.indices.put_alias.go index 461d19f5c8..1f72927377 100644 --- a/esapi/api.indices.put_alias.go +++ b/esapi/api.indices.put_alias.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.put_data_lifecycle.go b/esapi/api.indices.put_data_lifecycle.go index efbe8b33c5..925a2fb14d 100644 --- a/esapi/api.indices.put_data_lifecycle.go +++ b/esapi/api.indices.put_data_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.put_index_template.go b/esapi/api.indices.put_index_template.go index 2095f86639..22de545ae0 100644 --- a/esapi/api.indices.put_index_template.go +++ b/esapi/api.indices.put_index_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.put_mapping.go b/esapi/api.indices.put_mapping.go index 3caa804b0b..b6d652d5c0 100644 --- a/esapi/api.indices.put_mapping.go +++ b/esapi/api.indices.put_mapping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.put_settings.go b/esapi/api.indices.put_settings.go index 2f709fa3ac..c0fed00e3a 100644 --- a/esapi/api.indices.put_settings.go +++ b/esapi/api.indices.put_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.put_template.go b/esapi/api.indices.put_template.go index 7fe664efc1..0c8313ebb4 100644 --- a/esapi/api.indices.put_template.go +++ b/esapi/api.indices.put_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.recovery.go b/esapi/api.indices.recovery.go index 47c98c089f..090ac2544d 100644 --- a/esapi/api.indices.recovery.go +++ b/esapi/api.indices.recovery.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.refresh.go b/esapi/api.indices.refresh.go index c4d0b52001..7cb5c3f3e9 100644 --- a/esapi/api.indices.refresh.go +++ b/esapi/api.indices.refresh.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.resolve_cluster.go b/esapi/api.indices.resolve_cluster.go index 865f508d50..0da1eec04c 100644 --- a/esapi/api.indices.resolve_cluster.go +++ b/esapi/api.indices.resolve_cluster.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.resolve_index.go b/esapi/api.indices.resolve_index.go index b7b7cee33c..7c0b952a2a 100644 --- a/esapi/api.indices.resolve_index.go +++ b/esapi/api.indices.resolve_index.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.rollover.go b/esapi/api.indices.rollover.go index b484ed9928..5137173249 100644 --- a/esapi/api.indices.rollover.go +++ b/esapi/api.indices.rollover.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -61,6 +61,7 @@ type IndicesRolloverRequest struct { DryRun *bool Lazy *bool MasterTimeout time.Duration + TargetFailureStore *bool Timeout time.Duration WaitForActiveShards string @@ -126,6 +127,10 @@ func (r IndicesRolloverRequest) Do(providedCtx context.Context, transport Transp params["master_timeout"] = formatDuration(r.MasterTimeout) } + if r.TargetFailureStore != nil { + params["target_failure_store"] = strconv.FormatBool(*r.TargetFailureStore) + } + if r.Timeout != 0 { params["timeout"] = formatDuration(r.Timeout) } @@ -254,6 +259,13 @@ func (f IndicesRollover) WithMasterTimeout(v time.Duration) func(*IndicesRollove } } +// WithTargetFailureStore - if set to true, the rollover action will be applied on the failure store of the data stream.. +func (f IndicesRollover) WithTargetFailureStore(v bool) func(*IndicesRolloverRequest) { + return func(r *IndicesRolloverRequest) { + r.TargetFailureStore = &v + } +} + // WithTimeout - explicit operation timeout. func (f IndicesRollover) WithTimeout(v time.Duration) func(*IndicesRolloverRequest) { return func(r *IndicesRolloverRequest) { diff --git a/esapi/api.indices.segments.go b/esapi/api.indices.segments.go index 7a4c641dc0..c58365e3f9 100644 --- a/esapi/api.indices.segments.go +++ b/esapi/api.indices.segments.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
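Usage sketch for the new rollover option above (not part of the generated diff): it assumes a client `es` created with elasticsearch.NewDefaultClient() and a data stream named "my-data-stream"; setting the flag applies the rollover to the stream's failure store instead of its write index.

package examples

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// rolloverFailureStore rolls over the failure store of a data stream using the
// new WithTargetFailureStore option. The data-stream name is illustrative.
func rolloverFailureStore(ctx context.Context, es *elasticsearch.Client) error {
	res, err := es.Indices.Rollover(
		"my-data-stream",
		es.Indices.Rollover.WithTargetFailureStore(true),
		es.Indices.Rollover.WithContext(ctx),
	)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.IsError() {
		return fmt.Errorf("rollover failed: %s", res.String())
	}
	return nil
}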
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.shard_stores.go b/esapi/api.indices.shard_stores.go index adf62b94c3..3da39039c1 100644 --- a/esapi/api.indices.shard_stores.go +++ b/esapi/api.indices.shard_stores.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.shrink.go b/esapi/api.indices.shrink.go index 58140aaf18..b562872974 100644 --- a/esapi/api.indices.shrink.go +++ b/esapi/api.indices.shrink.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.simulate_index_template.go b/esapi/api.indices.simulate_index_template.go index ed040b8fbf..27732e6110 100644 --- a/esapi/api.indices.simulate_index_template.go +++ b/esapi/api.indices.simulate_index_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.simulate_template.go b/esapi/api.indices.simulate_template.go index 243eb758ac..d5492f7a12 100644 --- a/esapi/api.indices.simulate_template.go +++ b/esapi/api.indices.simulate_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.split.go b/esapi/api.indices.split.go index a64e10c286..7d319e23b5 100644 --- a/esapi/api.indices.split.go +++ b/esapi/api.indices.split.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.stats.go b/esapi/api.indices.stats.go index 14db7ae4ff..6f9d26d28a 100644 --- a/esapi/api.indices.stats.go +++ b/esapi/api.indices.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.update_aliases.go b/esapi/api.indices.update_aliases.go index 962b263b95..d7dd7a8d04 100644 --- a/esapi/api.indices.update_aliases.go +++ b/esapi/api.indices.update_aliases.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.indices.validate_query.go b/esapi/api.indices.validate_query.go index 4515a90713..818fe0896c 100644 --- a/esapi/api.indices.validate_query.go +++ b/esapi/api.indices.validate_query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.inference.delete_model.go b/esapi/api.inference.delete.go similarity index 64% rename from esapi/api.inference.delete_model.go rename to esapi/api.inference.delete.go index 297d45d4e2..4dea425dba 100644 --- a/esapi/api.inference.delete_model.go +++ b/esapi/api.inference.delete.go @@ -15,19 +15,20 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi import ( "context" "net/http" + "strconv" "strings" ) -func newInferenceDeleteModelFunc(t Transport) InferenceDeleteModel { - return func(inference_id string, o ...func(*InferenceDeleteModelRequest)) (*Response, error) { - var r = InferenceDeleteModelRequest{InferenceID: inference_id} +func newInferenceDeleteFunc(t Transport) InferenceDelete { + return func(inference_id string, o ...func(*InferenceDeleteRequest)) (*Response, error) { + var r = InferenceDeleteRequest{InferenceID: inference_id} for _, f := range o { f(&r) } @@ -42,18 +43,21 @@ func newInferenceDeleteModelFunc(t Transport) InferenceDeleteModel { // ----- API Definition ------------------------------------------------------- -// InferenceDeleteModel delete model in the Inference API +// InferenceDelete delete an inference endpoint // // This API is experimental. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-inference-api.html. -type InferenceDeleteModel func(inference_id string, o ...func(*InferenceDeleteModelRequest)) (*Response, error) +type InferenceDelete func(inference_id string, o ...func(*InferenceDeleteRequest)) (*Response, error) -// InferenceDeleteModelRequest configures the Inference Delete Model API request. -type InferenceDeleteModelRequest struct { +// InferenceDeleteRequest configures the Inference Delete API request. +type InferenceDeleteRequest struct { InferenceID string TaskType string + DryRun *bool + Force *bool + Pretty bool Human bool ErrorTrace bool @@ -67,7 +71,7 @@ type InferenceDeleteModelRequest struct { } // Do executes the request and returns response or error. 
-func (r InferenceDeleteModelRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { +func (r InferenceDeleteRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { var ( method string path strings.Builder @@ -76,7 +80,7 @@ func (r InferenceDeleteModelRequest) Do(providedCtx context.Context, transport T ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "inference.delete_model") + ctx = instrument.Start(providedCtx, "inference.delete") defer instrument.Close(ctx) } if ctx == nil { @@ -104,6 +108,14 @@ func (r InferenceDeleteModelRequest) Do(providedCtx context.Context, transport T params = make(map[string]string) + if r.DryRun != nil { + params["dry_run"] = strconv.FormatBool(*r.DryRun) + } + + if r.Force != nil { + params["force"] = strconv.FormatBool(*r.Force) + } + if r.Pretty { params["pretty"] = "true" } @@ -153,11 +165,11 @@ func (r InferenceDeleteModelRequest) Do(providedCtx context.Context, transport T } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "inference.delete_model") + instrument.BeforeRequest(req, "inference.delete") } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "inference.delete_model") + instrument.AfterRequest(req, "elasticsearch", "inference.delete") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { @@ -176,50 +188,64 @@ func (r InferenceDeleteModelRequest) Do(providedCtx context.Context, transport T } // WithContext sets the request context. -func (f InferenceDeleteModel) WithContext(v context.Context) func(*InferenceDeleteModelRequest) { - return func(r *InferenceDeleteModelRequest) { +func (f InferenceDelete) WithContext(v context.Context) func(*InferenceDeleteRequest) { + return func(r *InferenceDeleteRequest) { r.ctx = v } } // WithTaskType - the task type. -func (f InferenceDeleteModel) WithTaskType(v string) func(*InferenceDeleteModelRequest) { - return func(r *InferenceDeleteModelRequest) { +func (f InferenceDelete) WithTaskType(v string) func(*InferenceDeleteRequest) { + return func(r *InferenceDeleteRequest) { r.TaskType = v } } +// WithDryRun - if true the endpoint will not be deleted and a list of ingest processors which reference this endpoint will be returned.. +func (f InferenceDelete) WithDryRun(v bool) func(*InferenceDeleteRequest) { + return func(r *InferenceDeleteRequest) { + r.DryRun = &v + } +} + +// WithForce - if true the endpoint will be forcefully stopped (regardless of whether or not it is referenced by any ingest processors or semantic text fields).. +func (f InferenceDelete) WithForce(v bool) func(*InferenceDeleteRequest) { + return func(r *InferenceDeleteRequest) { + r.Force = &v + } +} + // WithPretty makes the response body pretty-printed. -func (f InferenceDeleteModel) WithPretty() func(*InferenceDeleteModelRequest) { - return func(r *InferenceDeleteModelRequest) { +func (f InferenceDelete) WithPretty() func(*InferenceDeleteRequest) { + return func(r *InferenceDeleteRequest) { r.Pretty = true } } // WithHuman makes statistical values human-readable. 
-func (f InferenceDeleteModel) WithHuman() func(*InferenceDeleteModelRequest) { - return func(r *InferenceDeleteModelRequest) { +func (f InferenceDelete) WithHuman() func(*InferenceDeleteRequest) { + return func(r *InferenceDeleteRequest) { r.Human = true } } // WithErrorTrace includes the stack trace for errors in the response body. -func (f InferenceDeleteModel) WithErrorTrace() func(*InferenceDeleteModelRequest) { - return func(r *InferenceDeleteModelRequest) { +func (f InferenceDelete) WithErrorTrace() func(*InferenceDeleteRequest) { + return func(r *InferenceDeleteRequest) { r.ErrorTrace = true } } // WithFilterPath filters the properties of the response body. -func (f InferenceDeleteModel) WithFilterPath(v ...string) func(*InferenceDeleteModelRequest) { - return func(r *InferenceDeleteModelRequest) { +func (f InferenceDelete) WithFilterPath(v ...string) func(*InferenceDeleteRequest) { + return func(r *InferenceDeleteRequest) { r.FilterPath = v } } // WithHeader adds the headers to the HTTP request. -func (f InferenceDeleteModel) WithHeader(h map[string]string) func(*InferenceDeleteModelRequest) { - return func(r *InferenceDeleteModelRequest) { +func (f InferenceDelete) WithHeader(h map[string]string) func(*InferenceDeleteRequest) { + return func(r *InferenceDeleteRequest) { if r.Header == nil { r.Header = make(http.Header) } @@ -230,8 +256,8 @@ func (f InferenceDeleteModel) WithHeader(h map[string]string) func(*InferenceDel } // WithOpaqueID adds the X-Opaque-Id header to the HTTP request. -func (f InferenceDeleteModel) WithOpaqueID(s string) func(*InferenceDeleteModelRequest) { - return func(r *InferenceDeleteModelRequest) { +func (f InferenceDelete) WithOpaqueID(s string) func(*InferenceDeleteRequest) { + return func(r *InferenceDeleteRequest) { if r.Header == nil { r.Header = make(http.Header) } diff --git a/esapi/api.inference.get_model.go b/esapi/api.inference.get.go similarity index 67% rename from esapi/api.inference.get_model.go rename to esapi/api.inference.get.go index b23765799a..f2b8ff3222 100644 --- a/esapi/api.inference.get_model.go +++ b/esapi/api.inference.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -25,9 +25,9 @@ import ( "strings" ) -func newInferenceGetModelFunc(t Transport) InferenceGetModel { - return func(inference_id string, o ...func(*InferenceGetModelRequest)) (*Response, error) { - var r = InferenceGetModelRequest{InferenceID: inference_id} +func newInferenceGetFunc(t Transport) InferenceGet { + return func(o ...func(*InferenceGetRequest)) (*Response, error) { + var r = InferenceGetRequest{} for _, f := range o { f(&r) } @@ -42,15 +42,15 @@ func newInferenceGetModelFunc(t Transport) InferenceGetModel { // ----- API Definition ------------------------------------------------------- -// InferenceGetModel get a model in the Inference API +// InferenceGet get an inference endpoint // // This API is experimental. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/get-inference-api.html. -type InferenceGetModel func(inference_id string, o ...func(*InferenceGetModelRequest)) (*Response, error) +type InferenceGet func(o ...func(*InferenceGetRequest)) (*Response, error) -// InferenceGetModelRequest configures the Inference Get Model API request. 
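A minimal sketch of the renamed inference.delete API with its new dry_run parameter (not part of the generated diff), using the request-struct style and a configured client `es`; the endpoint ID is illustrative.

package examples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

// dryRunDeleteEndpoint calls inference.delete with dry_run=true, which reports
// the ingest processors referencing the endpoint instead of deleting it.
// The endpoint ID "my-elser-endpoint" is illustrative.
func dryRunDeleteEndpoint(ctx context.Context, es *elasticsearch.Client) (*esapi.Response, error) {
	req := esapi.InferenceDeleteRequest{
		InferenceID: "my-elser-endpoint",
		DryRun:      esapi.BoolPtr(true),
	}
	return req.Do(ctx, es)
}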
-type InferenceGetModelRequest struct { +// InferenceGetRequest configures the Inference Get API request. +type InferenceGetRequest struct { InferenceID string TaskType string @@ -67,7 +67,7 @@ type InferenceGetModelRequest struct { } // Do executes the request and returns response or error. -func (r InferenceGetModelRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { +func (r InferenceGetRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { var ( method string path strings.Builder @@ -76,7 +76,7 @@ func (r InferenceGetModelRequest) Do(providedCtx context.Context, transport Tran ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "inference.get_model") + ctx = instrument.Start(providedCtx, "inference.get") defer instrument.Close(ctx) } if ctx == nil { @@ -96,10 +96,12 @@ func (r InferenceGetModelRequest) Do(providedCtx context.Context, transport Tran instrument.RecordPathPart(ctx, "task_type", r.TaskType) } } - path.WriteString("/") - path.WriteString(r.InferenceID) - if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.RecordPathPart(ctx, "inference_id", r.InferenceID) + if r.InferenceID != "" { + path.WriteString("/") + path.WriteString(r.InferenceID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "inference_id", r.InferenceID) + } } params = make(map[string]string) @@ -153,11 +155,11 @@ func (r InferenceGetModelRequest) Do(providedCtx context.Context, transport Tran } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "inference.get_model") + instrument.BeforeRequest(req, "inference.get") } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "inference.get_model") + instrument.AfterRequest(req, "elasticsearch", "inference.get") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { @@ -176,50 +178,57 @@ func (r InferenceGetModelRequest) Do(providedCtx context.Context, transport Tran } // WithContext sets the request context. -func (f InferenceGetModel) WithContext(v context.Context) func(*InferenceGetModelRequest) { - return func(r *InferenceGetModelRequest) { +func (f InferenceGet) WithContext(v context.Context) func(*InferenceGetRequest) { + return func(r *InferenceGetRequest) { r.ctx = v } } +// WithInferenceID - the inference ID. +func (f InferenceGet) WithInferenceID(v string) func(*InferenceGetRequest) { + return func(r *InferenceGetRequest) { + r.InferenceID = v + } +} + // WithTaskType - the task type. -func (f InferenceGetModel) WithTaskType(v string) func(*InferenceGetModelRequest) { - return func(r *InferenceGetModelRequest) { +func (f InferenceGet) WithTaskType(v string) func(*InferenceGetRequest) { + return func(r *InferenceGetRequest) { r.TaskType = v } } // WithPretty makes the response body pretty-printed. -func (f InferenceGetModel) WithPretty() func(*InferenceGetModelRequest) { - return func(r *InferenceGetModelRequest) { +func (f InferenceGet) WithPretty() func(*InferenceGetRequest) { + return func(r *InferenceGetRequest) { r.Pretty = true } } // WithHuman makes statistical values human-readable. 
-func (f InferenceGetModel) WithHuman() func(*InferenceGetModelRequest) { - return func(r *InferenceGetModelRequest) { +func (f InferenceGet) WithHuman() func(*InferenceGetRequest) { + return func(r *InferenceGetRequest) { r.Human = true } } // WithErrorTrace includes the stack trace for errors in the response body. -func (f InferenceGetModel) WithErrorTrace() func(*InferenceGetModelRequest) { - return func(r *InferenceGetModelRequest) { +func (f InferenceGet) WithErrorTrace() func(*InferenceGetRequest) { + return func(r *InferenceGetRequest) { r.ErrorTrace = true } } // WithFilterPath filters the properties of the response body. -func (f InferenceGetModel) WithFilterPath(v ...string) func(*InferenceGetModelRequest) { - return func(r *InferenceGetModelRequest) { +func (f InferenceGet) WithFilterPath(v ...string) func(*InferenceGetRequest) { + return func(r *InferenceGetRequest) { r.FilterPath = v } } // WithHeader adds the headers to the HTTP request. -func (f InferenceGetModel) WithHeader(h map[string]string) func(*InferenceGetModelRequest) { - return func(r *InferenceGetModelRequest) { +func (f InferenceGet) WithHeader(h map[string]string) func(*InferenceGetRequest) { + return func(r *InferenceGetRequest) { if r.Header == nil { r.Header = make(http.Header) } @@ -230,8 +239,8 @@ func (f InferenceGetModel) WithHeader(h map[string]string) func(*InferenceGetMod } // WithOpaqueID adds the X-Opaque-Id header to the HTTP request. -func (f InferenceGetModel) WithOpaqueID(s string) func(*InferenceGetModelRequest) { - return func(r *InferenceGetModelRequest) { +func (f InferenceGet) WithOpaqueID(s string) func(*InferenceGetRequest) { + return func(r *InferenceGetRequest) { if r.Header == nil { r.Header = make(http.Header) } diff --git a/esapi/api.inference.inference.go b/esapi/api.inference.inference.go index 6ab5f93139..9196e297e2 100644 --- a/esapi/api.inference.inference.go +++ b/esapi/api.inference.inference.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -43,7 +43,7 @@ func newInferenceInferenceFunc(t Transport) InferenceInference { // ----- API Definition ------------------------------------------------------- -// InferenceInference perform inference on a model +// InferenceInference perform inference // // This API is experimental. // diff --git a/esapi/api.inference.put_model.go b/esapi/api.inference.put.go similarity index 68% rename from esapi/api.inference.put_model.go rename to esapi/api.inference.put.go index 81de3264df..3927c12399 100644 --- a/esapi/api.inference.put_model.go +++ b/esapi/api.inference.put.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
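With the rename to inference.get, the inference ID moves from a required argument to the WithInferenceID option (or the InferenceID field), so the API can be called without an ID. A sketch under the same assumptions as above (configured client `es`, request-struct style):

package examples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

// listInferenceEndpoints leaves InferenceID empty, which resolves to GET /_inference
// and returns the configured endpoints; setting InferenceID (and optionally TaskType)
// narrows the request to a single endpoint.
func listInferenceEndpoints(ctx context.Context, es *elasticsearch.Client) (*esapi.Response, error) {
	req := esapi.InferenceGetRequest{} // no InferenceID: list all endpoints
	return req.Do(ctx, es)
}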
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -26,9 +26,9 @@ import ( "strings" ) -func newInferencePutModelFunc(t Transport) InferencePutModel { - return func(inference_id string, o ...func(*InferencePutModelRequest)) (*Response, error) { - var r = InferencePutModelRequest{InferenceID: inference_id} +func newInferencePutFunc(t Transport) InferencePut { + return func(inference_id string, o ...func(*InferencePutRequest)) (*Response, error) { + var r = InferencePutRequest{InferenceID: inference_id} for _, f := range o { f(&r) } @@ -43,15 +43,15 @@ func newInferencePutModelFunc(t Transport) InferencePutModel { // ----- API Definition ------------------------------------------------------- -// InferencePutModel configure a model for use in the Inference API +// InferencePut configure an inference endpoint for use in the Inference API // // This API is experimental. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/put-inference-api.html. -type InferencePutModel func(inference_id string, o ...func(*InferencePutModelRequest)) (*Response, error) +type InferencePut func(inference_id string, o ...func(*InferencePutRequest)) (*Response, error) -// InferencePutModelRequest configures the Inference Put Model API request. -type InferencePutModelRequest struct { +// InferencePutRequest configures the Inference Put API request. +type InferencePutRequest struct { Body io.Reader InferenceID string @@ -70,7 +70,7 @@ type InferencePutModelRequest struct { } // Do executes the request and returns response or error. -func (r InferencePutModelRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { +func (r InferencePutRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { var ( method string path strings.Builder @@ -79,7 +79,7 @@ func (r InferencePutModelRequest) Do(providedCtx context.Context, transport Tran ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "inference.put_model") + ctx = instrument.Start(providedCtx, "inference.put") defer instrument.Close(ctx) } if ctx == nil { @@ -160,14 +160,14 @@ func (r InferencePutModelRequest) Do(providedCtx context.Context, transport Tran } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "inference.put_model") - if reader := instrument.RecordRequestBody(ctx, "inference.put_model", r.Body); reader != nil { + instrument.BeforeRequest(req, "inference.put") + if reader := instrument.RecordRequestBody(ctx, "inference.put", r.Body); reader != nil { req.Body = reader } } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "inference.put_model") + instrument.AfterRequest(req, "elasticsearch", "inference.put") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { @@ -186,57 +186,57 @@ func (r InferencePutModelRequest) Do(providedCtx context.Context, transport Tran } // WithContext sets the request context. -func (f InferencePutModel) WithContext(v context.Context) func(*InferencePutModelRequest) { - return func(r *InferencePutModelRequest) { +func (f InferencePut) WithContext(v context.Context) func(*InferencePutRequest) { + return func(r *InferencePutRequest) { r.ctx = v } } -// WithBody - The model's task and service settings. 
-func (f InferencePutModel) WithBody(v io.Reader) func(*InferencePutModelRequest) { - return func(r *InferencePutModelRequest) { +// WithBody - The inference endpoint's task and service settings. +func (f InferencePut) WithBody(v io.Reader) func(*InferencePutRequest) { + return func(r *InferencePutRequest) { r.Body = v } } // WithTaskType - the task type. -func (f InferencePutModel) WithTaskType(v string) func(*InferencePutModelRequest) { - return func(r *InferencePutModelRequest) { +func (f InferencePut) WithTaskType(v string) func(*InferencePutRequest) { + return func(r *InferencePutRequest) { r.TaskType = v } } // WithPretty makes the response body pretty-printed. -func (f InferencePutModel) WithPretty() func(*InferencePutModelRequest) { - return func(r *InferencePutModelRequest) { +func (f InferencePut) WithPretty() func(*InferencePutRequest) { + return func(r *InferencePutRequest) { r.Pretty = true } } // WithHuman makes statistical values human-readable. -func (f InferencePutModel) WithHuman() func(*InferencePutModelRequest) { - return func(r *InferencePutModelRequest) { +func (f InferencePut) WithHuman() func(*InferencePutRequest) { + return func(r *InferencePutRequest) { r.Human = true } } // WithErrorTrace includes the stack trace for errors in the response body. -func (f InferencePutModel) WithErrorTrace() func(*InferencePutModelRequest) { - return func(r *InferencePutModelRequest) { +func (f InferencePut) WithErrorTrace() func(*InferencePutRequest) { + return func(r *InferencePutRequest) { r.ErrorTrace = true } } // WithFilterPath filters the properties of the response body. -func (f InferencePutModel) WithFilterPath(v ...string) func(*InferencePutModelRequest) { - return func(r *InferencePutModelRequest) { +func (f InferencePut) WithFilterPath(v ...string) func(*InferencePutRequest) { + return func(r *InferencePutRequest) { r.FilterPath = v } } // WithHeader adds the headers to the HTTP request. -func (f InferencePutModel) WithHeader(h map[string]string) func(*InferencePutModelRequest) { - return func(r *InferencePutModelRequest) { +func (f InferencePut) WithHeader(h map[string]string) func(*InferencePutRequest) { + return func(r *InferencePutRequest) { if r.Header == nil { r.Header = make(http.Header) } @@ -247,8 +247,8 @@ func (f InferencePutModel) WithHeader(h map[string]string) func(*InferencePutMod } // WithOpaqueID adds the X-Opaque-Id header to the HTTP request. -func (f InferencePutModel) WithOpaqueID(s string) func(*InferencePutModelRequest) { - return func(r *InferencePutModelRequest) { +func (f InferencePut) WithOpaqueID(s string) func(*InferencePutRequest) { + return func(r *InferencePutRequest) { if r.Header == nil { r.Header = make(http.Header) } diff --git a/esapi/api.info.go b/esapi/api.info.go index fe78275a6d..9c2f237f0c 100644 --- a/esapi/api.info.go +++ b/esapi/api.info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.ingest.delete_pipeline.go b/esapi/api.ingest.delete_pipeline.go index e280a020ee..aef07fba41 100644 --- a/esapi/api.ingest.delete_pipeline.go +++ b/esapi/api.ingest.delete_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
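A sketch of the renamed inference.put API (not part of the generated diff): the ELSER service settings in the body and the endpoint ID are assumptions for illustration; the full body schema is described in the put-inference documentation linked above.

package examples

import (
	"context"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

// createSparseEmbeddingEndpoint registers an inference endpoint via inference.put.
// The body carries the endpoint's task and service settings; values shown are illustrative.
func createSparseEmbeddingEndpoint(ctx context.Context, es *elasticsearch.Client) (*esapi.Response, error) {
	body := strings.NewReader(`{
	  "service": "elser",
	  "service_settings": { "num_allocations": 1, "num_threads": 1 }
	}`)
	req := esapi.InferencePutRequest{
		TaskType:    "sparse_embedding",
		InferenceID: "my-elser-endpoint",
		Body:        body,
	}
	return req.Do(ctx, es)
}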
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.ingest.geo_ip_stats.go b/esapi/api.ingest.geo_ip_stats.go index db2943056a..71ab02bbd5 100644 --- a/esapi/api.ingest.geo_ip_stats.go +++ b/esapi/api.ingest.geo_ip_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.ingest.get_pipeline.go b/esapi/api.ingest.get_pipeline.go index 9bc9ab42d5..9da536605c 100644 --- a/esapi/api.ingest.get_pipeline.go +++ b/esapi/api.ingest.get_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.ingest.processor_grok.go b/esapi/api.ingest.processor_grok.go index 87e72b6b9e..16b3779ba1 100644 --- a/esapi/api.ingest.processor_grok.go +++ b/esapi/api.ingest.processor_grok.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.ingest.put_pipeline.go b/esapi/api.ingest.put_pipeline.go index e5ebe9791b..da960fb175 100644 --- a/esapi/api.ingest.put_pipeline.go +++ b/esapi/api.ingest.put_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.ingest.simulate.go b/esapi/api.ingest.simulate.go index f5c393aa49..e0bb9da3ef 100644 --- a/esapi/api.ingest.simulate.go +++ b/esapi/api.ingest.simulate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.knn_search.go b/esapi/api.knn_search.go index 8dce408276..6f9ee2a974 100644 --- a/esapi/api.knn_search.go +++ b/esapi/api.knn_search.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.mget.go b/esapi/api.mget.go index acd493cd52..6411aa717a 100644 --- a/esapi/api.mget.go +++ b/esapi/api.mget.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.msearch.go b/esapi/api.msearch.go index 9a6292a020..782a127fe4 100644 --- a/esapi/api.msearch.go +++ b/esapi/api.msearch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.msearch_template.go b/esapi/api.msearch_template.go index ebc6118fcf..9fced95d0b 100644 --- a/esapi/api.msearch_template.go +++ b/esapi/api.msearch_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.mtermvectors.go b/esapi/api.mtermvectors.go index e7f97eaefc..b093ae15cc 100644 --- a/esapi/api.mtermvectors.go +++ b/esapi/api.mtermvectors.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.clear_repositories_metering_archive.go b/esapi/api.nodes.clear_repositories_metering_archive.go index 76c90ee7a4..b2ab32e3ac 100644 --- a/esapi/api.nodes.clear_repositories_metering_archive.go +++ b/esapi/api.nodes.clear_repositories_metering_archive.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.get_repositories_metering_info.go b/esapi/api.nodes.get_repositories_metering_info.go index 1f5c8241a8..bade77c97e 100644 --- a/esapi/api.nodes.get_repositories_metering_info.go +++ b/esapi/api.nodes.get_repositories_metering_info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.hot_threads.go b/esapi/api.nodes.hot_threads.go index 2c9e25730e..361402b3a7 100644 --- a/esapi/api.nodes.hot_threads.go +++ b/esapi/api.nodes.hot_threads.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.info.go b/esapi/api.nodes.info.go index 6c8df20983..64e4b97267 100644 --- a/esapi/api.nodes.info.go +++ b/esapi/api.nodes.info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.reload_secure_settings.go b/esapi/api.nodes.reload_secure_settings.go index 903e2118f3..71b51d6e18 100644 --- a/esapi/api.nodes.reload_secure_settings.go +++ b/esapi/api.nodes.reload_secure_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.stats.go b/esapi/api.nodes.stats.go index ca8de33b73..bedccf867e 100644 --- a/esapi/api.nodes.stats.go +++ b/esapi/api.nodes.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.nodes.usage.go b/esapi/api.nodes.usage.go index fb8a922cca..9e12fe91ac 100644 --- a/esapi/api.nodes.usage.go +++ b/esapi/api.nodes.usage.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.ping.go b/esapi/api.ping.go index 5dbb0be23e..e5d7d3e393 100644 --- a/esapi/api.ping.go +++ b/esapi/api.ping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.profiling.stacktraces.go b/esapi/api.profiling.stacktraces.go index 73cce2bbd5..2b3c884185 100644 --- a/esapi/api.profiling.stacktraces.go +++ b/esapi/api.profiling.stacktraces.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.profiling.status.go b/esapi/api.profiling.status.go index fdb2c60002..e17ec70f47 100644 --- a/esapi/api.profiling.status.go +++ b/esapi/api.profiling.status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.profiling.topn_functions.go b/esapi/api.profiling.topn_functions.go new file mode 100644 index 0000000000..0d90f28eed --- /dev/null +++ b/esapi/api.profiling.topn_functions.go @@ -0,0 +1,225 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+// +// Code generated from specification version 8.15.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newProfilingTopnFunctionsFunc(t Transport) ProfilingTopnFunctions { + return func(body io.Reader, o ...func(*ProfilingTopnFunctionsRequest)) (*Response, error) { + var r = ProfilingTopnFunctionsRequest{Body: body} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// ProfilingTopnFunctions extracts a list of topN functions from Universal Profiling. +// +// See full documentation at https://www.elastic.co/guide/en/observability/current/universal-profiling.html. +type ProfilingTopnFunctions func(body io.Reader, o ...func(*ProfilingTopnFunctionsRequest)) (*Response, error) + +// ProfilingTopnFunctionsRequest configures the Profiling Topn Functions API request. +type ProfilingTopnFunctionsRequest struct { + Body io.Reader + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r ProfilingTopnFunctionsRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "profiling.topn_functions") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "POST" + + path.Grow(7 + len("/_profiling/topn/functions")) + path.WriteString("http://") + path.WriteString("/_profiling/topn/functions") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "profiling.topn_functions") + if reader := instrument.RecordRequestBody(ctx, "profiling.topn_functions", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "profiling.topn_functions") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// 
WithContext sets the request context. +func (f ProfilingTopnFunctions) WithContext(v context.Context) func(*ProfilingTopnFunctionsRequest) { + return func(r *ProfilingTopnFunctionsRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f ProfilingTopnFunctions) WithPretty() func(*ProfilingTopnFunctionsRequest) { + return func(r *ProfilingTopnFunctionsRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f ProfilingTopnFunctions) WithHuman() func(*ProfilingTopnFunctionsRequest) { + return func(r *ProfilingTopnFunctionsRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f ProfilingTopnFunctions) WithErrorTrace() func(*ProfilingTopnFunctionsRequest) { + return func(r *ProfilingTopnFunctionsRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f ProfilingTopnFunctions) WithFilterPath(v ...string) func(*ProfilingTopnFunctionsRequest) { + return func(r *ProfilingTopnFunctionsRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f ProfilingTopnFunctions) WithHeader(h map[string]string) func(*ProfilingTopnFunctionsRequest) { + return func(r *ProfilingTopnFunctionsRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f ProfilingTopnFunctions) WithOpaqueID(s string) func(*ProfilingTopnFunctionsRequest) { + return func(r *ProfilingTopnFunctionsRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.put_script.go b/esapi/api.put_script.go index e986601720..9246427c41 100644 --- a/esapi/api.put_script.go +++ b/esapi/api.put_script.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.query_rules.delete_rule.go b/esapi/api.query_rules.delete_rule.go new file mode 100644 index 0000000000..bbdb2b017e --- /dev/null +++ b/esapi/api.query_rules.delete_rule.go @@ -0,0 +1,231 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
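A sketch of the new profiling.topn_functions API (not part of the generated diff): the request body shown here, a sample size plus a time-range query, is an assumption for illustration only; consult the Universal Profiling documentation for the actual schema, and note that the endpoint requires profiling data to be present.

package examples

import (
	"context"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

// topNFunctions posts to /_profiling/topn/functions. Body fields are illustrative
// assumptions, not the documented schema.
func topNFunctions(ctx context.Context, es *elasticsearch.Client) (*esapi.Response, error) {
	body := strings.NewReader(`{
	  "sample_size": 20000,
	  "query": { "range": { "@timestamp": { "gte": "now-15m" } } }
	}`)
	req := esapi.ProfilingTopnFunctionsRequest{Body: body}
	return req.Do(ctx, es)
}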
+// +// Code generated from specification version 8.15.0: DO NOT EDIT + +package esapi + +import ( + "context" + "net/http" + "strings" +) + +func newQueryRulesDeleteRuleFunc(t Transport) QueryRulesDeleteRule { + return func(rule_id string, ruleset_id string, o ...func(*QueryRulesDeleteRuleRequest)) (*Response, error) { + var r = QueryRulesDeleteRuleRequest{RuleID: rule_id, RulesetID: ruleset_id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// QueryRulesDeleteRule deletes an individual query rule within a ruleset. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-query-rule.html. +type QueryRulesDeleteRule func(rule_id string, ruleset_id string, o ...func(*QueryRulesDeleteRuleRequest)) (*Response, error) + +// QueryRulesDeleteRuleRequest configures the Query Rules Delete Rule API request. +type QueryRulesDeleteRuleRequest struct { + RuleID string + RulesetID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r QueryRulesDeleteRuleRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "query_rules.delete_rule") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "DELETE" + + path.Grow(7 + 1 + len("_query_rules") + 1 + len(r.RulesetID) + 1 + len("_rule") + 1 + len(r.RuleID)) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_query_rules") + path.WriteString("/") + path.WriteString(r.RulesetID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "ruleset_id", r.RulesetID) + } + path.WriteString("/") + path.WriteString("_rule") + path.WriteString("/") + path.WriteString(r.RuleID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "rule_id", r.RuleID) + } + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), nil) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "query_rules.delete_rule") + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "query_rules.delete_rule") + } + if err != 
nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f QueryRulesDeleteRule) WithContext(v context.Context) func(*QueryRulesDeleteRuleRequest) { + return func(r *QueryRulesDeleteRuleRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f QueryRulesDeleteRule) WithPretty() func(*QueryRulesDeleteRuleRequest) { + return func(r *QueryRulesDeleteRuleRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f QueryRulesDeleteRule) WithHuman() func(*QueryRulesDeleteRuleRequest) { + return func(r *QueryRulesDeleteRuleRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f QueryRulesDeleteRule) WithErrorTrace() func(*QueryRulesDeleteRuleRequest) { + return func(r *QueryRulesDeleteRuleRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f QueryRulesDeleteRule) WithFilterPath(v ...string) func(*QueryRulesDeleteRuleRequest) { + return func(r *QueryRulesDeleteRuleRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f QueryRulesDeleteRule) WithHeader(h map[string]string) func(*QueryRulesDeleteRuleRequest) { + return func(r *QueryRulesDeleteRuleRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f QueryRulesDeleteRule) WithOpaqueID(s string) func(*QueryRulesDeleteRuleRequest) { + return func(r *QueryRulesDeleteRuleRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.query_ruleset.delete.go b/esapi/api.query_rules.delete_ruleset.go similarity index 67% rename from esapi/api.query_ruleset.delete.go rename to esapi/api.query_rules.delete_ruleset.go index cd0b8315b2..64de26e979 100644 --- a/esapi/api.query_ruleset.delete.go +++ b/esapi/api.query_rules.delete_ruleset.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -25,9 +25,9 @@ import ( "strings" ) -func newQueryRulesetDeleteFunc(t Transport) QueryRulesetDelete { - return func(ruleset_id string, o ...func(*QueryRulesetDeleteRequest)) (*Response, error) { - var r = QueryRulesetDeleteRequest{RulesetID: ruleset_id} +func newQueryRulesDeleteRulesetFunc(t Transport) QueryRulesDeleteRuleset { + return func(ruleset_id string, o ...func(*QueryRulesDeleteRulesetRequest)) (*Response, error) { + var r = QueryRulesDeleteRulesetRequest{RulesetID: ruleset_id} for _, f := range o { f(&r) } @@ -42,15 +42,13 @@ func newQueryRulesetDeleteFunc(t Transport) QueryRulesetDelete { // ----- API Definition ------------------------------------------------------- -// QueryRulesetDelete deletes a query ruleset. -// -// This API is experimental. +// QueryRulesDeleteRuleset deletes a query ruleset. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-query-ruleset.html. 
-type QueryRulesetDelete func(ruleset_id string, o ...func(*QueryRulesetDeleteRequest)) (*Response, error) +type QueryRulesDeleteRuleset func(ruleset_id string, o ...func(*QueryRulesDeleteRulesetRequest)) (*Response, error) -// QueryRulesetDeleteRequest configures the Query Ruleset Delete API request. -type QueryRulesetDeleteRequest struct { +// QueryRulesDeleteRulesetRequest configures the Query Rules Delete Ruleset API request. +type QueryRulesDeleteRulesetRequest struct { RulesetID string Pretty bool @@ -66,7 +64,7 @@ type QueryRulesetDeleteRequest struct { } // Do executes the request and returns response or error. -func (r QueryRulesetDeleteRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { +func (r QueryRulesDeleteRulesetRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { var ( method string path strings.Builder @@ -75,7 +73,7 @@ func (r QueryRulesetDeleteRequest) Do(providedCtx context.Context, transport Tra ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "query_ruleset.delete") + ctx = instrument.Start(providedCtx, "query_rules.delete_ruleset") defer instrument.Close(ctx) } if ctx == nil { @@ -145,11 +143,11 @@ func (r QueryRulesetDeleteRequest) Do(providedCtx context.Context, transport Tra } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "query_ruleset.delete") + instrument.BeforeRequest(req, "query_rules.delete_ruleset") } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "query_ruleset.delete") + instrument.AfterRequest(req, "elasticsearch", "query_rules.delete_ruleset") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { @@ -168,43 +166,43 @@ func (r QueryRulesetDeleteRequest) Do(providedCtx context.Context, transport Tra } // WithContext sets the request context. -func (f QueryRulesetDelete) WithContext(v context.Context) func(*QueryRulesetDeleteRequest) { - return func(r *QueryRulesetDeleteRequest) { +func (f QueryRulesDeleteRuleset) WithContext(v context.Context) func(*QueryRulesDeleteRulesetRequest) { + return func(r *QueryRulesDeleteRulesetRequest) { r.ctx = v } } // WithPretty makes the response body pretty-printed. -func (f QueryRulesetDelete) WithPretty() func(*QueryRulesetDeleteRequest) { - return func(r *QueryRulesetDeleteRequest) { +func (f QueryRulesDeleteRuleset) WithPretty() func(*QueryRulesDeleteRulesetRequest) { + return func(r *QueryRulesDeleteRulesetRequest) { r.Pretty = true } } // WithHuman makes statistical values human-readable. -func (f QueryRulesetDelete) WithHuman() func(*QueryRulesetDeleteRequest) { - return func(r *QueryRulesetDeleteRequest) { +func (f QueryRulesDeleteRuleset) WithHuman() func(*QueryRulesDeleteRulesetRequest) { + return func(r *QueryRulesDeleteRulesetRequest) { r.Human = true } } // WithErrorTrace includes the stack trace for errors in the response body. -func (f QueryRulesetDelete) WithErrorTrace() func(*QueryRulesetDeleteRequest) { - return func(r *QueryRulesetDeleteRequest) { +func (f QueryRulesDeleteRuleset) WithErrorTrace() func(*QueryRulesDeleteRulesetRequest) { + return func(r *QueryRulesDeleteRulesetRequest) { r.ErrorTrace = true } } // WithFilterPath filters the properties of the response body. 
-func (f QueryRulesetDelete) WithFilterPath(v ...string) func(*QueryRulesetDeleteRequest) { - return func(r *QueryRulesetDeleteRequest) { +func (f QueryRulesDeleteRuleset) WithFilterPath(v ...string) func(*QueryRulesDeleteRulesetRequest) { + return func(r *QueryRulesDeleteRulesetRequest) { r.FilterPath = v } } // WithHeader adds the headers to the HTTP request. -func (f QueryRulesetDelete) WithHeader(h map[string]string) func(*QueryRulesetDeleteRequest) { - return func(r *QueryRulesetDeleteRequest) { +func (f QueryRulesDeleteRuleset) WithHeader(h map[string]string) func(*QueryRulesDeleteRulesetRequest) { + return func(r *QueryRulesDeleteRulesetRequest) { if r.Header == nil { r.Header = make(http.Header) } @@ -215,8 +213,8 @@ func (f QueryRulesetDelete) WithHeader(h map[string]string) func(*QueryRulesetDe } // WithOpaqueID adds the X-Opaque-Id header to the HTTP request. -func (f QueryRulesetDelete) WithOpaqueID(s string) func(*QueryRulesetDeleteRequest) { - return func(r *QueryRulesetDeleteRequest) { +func (f QueryRulesDeleteRuleset) WithOpaqueID(s string) func(*QueryRulesDeleteRulesetRequest) { + return func(r *QueryRulesDeleteRulesetRequest) { if r.Header == nil { r.Header = make(http.Header) } diff --git a/esapi/api.query_rules.get_rule.go b/esapi/api.query_rules.get_rule.go new file mode 100644 index 0000000000..1c33d76ef6 --- /dev/null +++ b/esapi/api.query_rules.get_rule.go @@ -0,0 +1,231 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.15.0: DO NOT EDIT + +package esapi + +import ( + "context" + "net/http" + "strings" +) + +func newQueryRulesGetRuleFunc(t Transport) QueryRulesGetRule { + return func(rule_id string, ruleset_id string, o ...func(*QueryRulesGetRuleRequest)) (*Response, error) { + var r = QueryRulesGetRuleRequest{RuleID: rule_id, RulesetID: ruleset_id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// QueryRulesGetRule returns the details about an individual query rule within a ruleset. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/get-query-rule.html. +type QueryRulesGetRule func(rule_id string, ruleset_id string, o ...func(*QueryRulesGetRuleRequest)) (*Response, error) + +// QueryRulesGetRuleRequest configures the Query Rules Get Rule API request. 
+type QueryRulesGetRuleRequest struct { + RuleID string + RulesetID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r QueryRulesGetRuleRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "query_rules.get_rule") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "GET" + + path.Grow(7 + 1 + len("_query_rules") + 1 + len(r.RulesetID) + 1 + len("_rule") + 1 + len(r.RuleID)) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_query_rules") + path.WriteString("/") + path.WriteString(r.RulesetID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "ruleset_id", r.RulesetID) + } + path.WriteString("/") + path.WriteString("_rule") + path.WriteString("/") + path.WriteString(r.RuleID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "rule_id", r.RuleID) + } + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), nil) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "query_rules.get_rule") + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "query_rules.get_rule") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f QueryRulesGetRule) WithContext(v context.Context) func(*QueryRulesGetRuleRequest) { + return func(r *QueryRulesGetRuleRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f QueryRulesGetRule) WithPretty() func(*QueryRulesGetRuleRequest) { + return func(r *QueryRulesGetRuleRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f QueryRulesGetRule) WithHuman() func(*QueryRulesGetRuleRequest) { + return func(r *QueryRulesGetRuleRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. 
+func (f QueryRulesGetRule) WithErrorTrace() func(*QueryRulesGetRuleRequest) { + return func(r *QueryRulesGetRuleRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f QueryRulesGetRule) WithFilterPath(v ...string) func(*QueryRulesGetRuleRequest) { + return func(r *QueryRulesGetRuleRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f QueryRulesGetRule) WithHeader(h map[string]string) func(*QueryRulesGetRuleRequest) { + return func(r *QueryRulesGetRuleRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f QueryRulesGetRule) WithOpaqueID(s string) func(*QueryRulesGetRuleRequest) { + return func(r *QueryRulesGetRuleRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.query_ruleset.get.go b/esapi/api.query_rules.get_ruleset.go similarity index 68% rename from esapi/api.query_ruleset.get.go rename to esapi/api.query_rules.get_ruleset.go index c14464d2f8..092314442d 100644 --- a/esapi/api.query_ruleset.get.go +++ b/esapi/api.query_rules.get_ruleset.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -25,9 +25,9 @@ import ( "strings" ) -func newQueryRulesetGetFunc(t Transport) QueryRulesetGet { - return func(ruleset_id string, o ...func(*QueryRulesetGetRequest)) (*Response, error) { - var r = QueryRulesetGetRequest{RulesetID: ruleset_id} +func newQueryRulesGetRulesetFunc(t Transport) QueryRulesGetRuleset { + return func(ruleset_id string, o ...func(*QueryRulesGetRulesetRequest)) (*Response, error) { + var r = QueryRulesGetRulesetRequest{RulesetID: ruleset_id} for _, f := range o { f(&r) } @@ -42,15 +42,13 @@ func newQueryRulesetGetFunc(t Transport) QueryRulesetGet { // ----- API Definition ------------------------------------------------------- -// QueryRulesetGet returns the details about a query ruleset. -// -// This API is experimental. +// QueryRulesGetRuleset returns the details about a query ruleset. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/get-query-ruleset.html. -type QueryRulesetGet func(ruleset_id string, o ...func(*QueryRulesetGetRequest)) (*Response, error) +type QueryRulesGetRuleset func(ruleset_id string, o ...func(*QueryRulesGetRulesetRequest)) (*Response, error) -// QueryRulesetGetRequest configures the Query Ruleset Get API request. -type QueryRulesetGetRequest struct { +// QueryRulesGetRulesetRequest configures the Query Rules Get Ruleset API request. +type QueryRulesGetRulesetRequest struct { RulesetID string Pretty bool @@ -66,7 +64,7 @@ type QueryRulesetGetRequest struct { } // Do executes the request and returns response or error. 
-func (r QueryRulesetGetRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { +func (r QueryRulesGetRulesetRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { var ( method string path strings.Builder @@ -75,7 +73,7 @@ func (r QueryRulesetGetRequest) Do(providedCtx context.Context, transport Transp ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "query_ruleset.get") + ctx = instrument.Start(providedCtx, "query_rules.get_ruleset") defer instrument.Close(ctx) } if ctx == nil { @@ -145,11 +143,11 @@ func (r QueryRulesetGetRequest) Do(providedCtx context.Context, transport Transp } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "query_ruleset.get") + instrument.BeforeRequest(req, "query_rules.get_ruleset") } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "query_ruleset.get") + instrument.AfterRequest(req, "elasticsearch", "query_rules.get_ruleset") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { @@ -168,43 +166,43 @@ func (r QueryRulesetGetRequest) Do(providedCtx context.Context, transport Transp } // WithContext sets the request context. -func (f QueryRulesetGet) WithContext(v context.Context) func(*QueryRulesetGetRequest) { - return func(r *QueryRulesetGetRequest) { +func (f QueryRulesGetRuleset) WithContext(v context.Context) func(*QueryRulesGetRulesetRequest) { + return func(r *QueryRulesGetRulesetRequest) { r.ctx = v } } // WithPretty makes the response body pretty-printed. -func (f QueryRulesetGet) WithPretty() func(*QueryRulesetGetRequest) { - return func(r *QueryRulesetGetRequest) { +func (f QueryRulesGetRuleset) WithPretty() func(*QueryRulesGetRulesetRequest) { + return func(r *QueryRulesGetRulesetRequest) { r.Pretty = true } } // WithHuman makes statistical values human-readable. -func (f QueryRulesetGet) WithHuman() func(*QueryRulesetGetRequest) { - return func(r *QueryRulesetGetRequest) { +func (f QueryRulesGetRuleset) WithHuman() func(*QueryRulesGetRulesetRequest) { + return func(r *QueryRulesGetRulesetRequest) { r.Human = true } } // WithErrorTrace includes the stack trace for errors in the response body. -func (f QueryRulesetGet) WithErrorTrace() func(*QueryRulesetGetRequest) { - return func(r *QueryRulesetGetRequest) { +func (f QueryRulesGetRuleset) WithErrorTrace() func(*QueryRulesGetRulesetRequest) { + return func(r *QueryRulesGetRulesetRequest) { r.ErrorTrace = true } } // WithFilterPath filters the properties of the response body. -func (f QueryRulesetGet) WithFilterPath(v ...string) func(*QueryRulesetGetRequest) { - return func(r *QueryRulesetGetRequest) { +func (f QueryRulesGetRuleset) WithFilterPath(v ...string) func(*QueryRulesGetRulesetRequest) { + return func(r *QueryRulesGetRulesetRequest) { r.FilterPath = v } } // WithHeader adds the headers to the HTTP request. -func (f QueryRulesetGet) WithHeader(h map[string]string) func(*QueryRulesetGetRequest) { - return func(r *QueryRulesetGetRequest) { +func (f QueryRulesGetRuleset) WithHeader(h map[string]string) func(*QueryRulesGetRulesetRequest) { + return func(r *QueryRulesGetRulesetRequest) { if r.Header == nil { r.Header = make(http.Header) } @@ -215,8 +213,8 @@ func (f QueryRulesetGet) WithHeader(h map[string]string) func(*QueryRulesetGetRe } // WithOpaqueID adds the X-Opaque-Id header to the HTTP request. 
-func (f QueryRulesetGet) WithOpaqueID(s string) func(*QueryRulesetGetRequest) { - return func(r *QueryRulesetGetRequest) { +func (f QueryRulesGetRuleset) WithOpaqueID(s string) func(*QueryRulesGetRulesetRequest) { + return func(r *QueryRulesGetRulesetRequest) { if r.Header == nil { r.Header = make(http.Header) } diff --git a/esapi/api.query_ruleset.list.go b/esapi/api.query_rules.list_rulesets.go similarity index 66% rename from esapi/api.query_ruleset.list.go rename to esapi/api.query_rules.list_rulesets.go index b59e1a7b7a..fe671a8c11 100644 --- a/esapi/api.query_ruleset.list.go +++ b/esapi/api.query_rules.list_rulesets.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -26,9 +26,9 @@ import ( "strings" ) -func newQueryRulesetListFunc(t Transport) QueryRulesetList { - return func(o ...func(*QueryRulesetListRequest)) (*Response, error) { - var r = QueryRulesetListRequest{} +func newQueryRulesListRulesetsFunc(t Transport) QueryRulesListRulesets { + return func(o ...func(*QueryRulesListRulesetsRequest)) (*Response, error) { + var r = QueryRulesListRulesetsRequest{} for _, f := range o { f(&r) } @@ -43,15 +43,13 @@ func newQueryRulesetListFunc(t Transport) QueryRulesetList { // ----- API Definition ------------------------------------------------------- -// QueryRulesetList lists query rulesets. -// -// This API is experimental. +// QueryRulesListRulesets lists query rulesets. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/list-query-rulesets.html. -type QueryRulesetList func(o ...func(*QueryRulesetListRequest)) (*Response, error) +type QueryRulesListRulesets func(o ...func(*QueryRulesListRulesetsRequest)) (*Response, error) -// QueryRulesetListRequest configures the Query Ruleset List API request. -type QueryRulesetListRequest struct { +// QueryRulesListRulesetsRequest configures the Query Rules List Rulesets API request. +type QueryRulesListRulesetsRequest struct { From *int Size *int @@ -68,7 +66,7 @@ type QueryRulesetListRequest struct { } // Do executes the request and returns response or error. 
-func (r QueryRulesetListRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { +func (r QueryRulesListRulesetsRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { var ( method string path strings.Builder @@ -77,7 +75,7 @@ func (r QueryRulesetListRequest) Do(providedCtx context.Context, transport Trans ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "query_ruleset.list") + ctx = instrument.Start(providedCtx, "query_rules.list_rulesets") defer instrument.Close(ctx) } if ctx == nil { @@ -149,11 +147,11 @@ func (r QueryRulesetListRequest) Do(providedCtx context.Context, transport Trans } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "query_ruleset.list") + instrument.BeforeRequest(req, "query_rules.list_rulesets") } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "query_ruleset.list") + instrument.AfterRequest(req, "elasticsearch", "query_rules.list_rulesets") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { @@ -172,57 +170,57 @@ func (r QueryRulesetListRequest) Do(providedCtx context.Context, transport Trans } // WithContext sets the request context. -func (f QueryRulesetList) WithContext(v context.Context) func(*QueryRulesetListRequest) { - return func(r *QueryRulesetListRequest) { +func (f QueryRulesListRulesets) WithContext(v context.Context) func(*QueryRulesListRulesetsRequest) { + return func(r *QueryRulesListRulesetsRequest) { r.ctx = v } } // WithFrom - starting offset (default: 0). -func (f QueryRulesetList) WithFrom(v int) func(*QueryRulesetListRequest) { - return func(r *QueryRulesetListRequest) { +func (f QueryRulesListRulesets) WithFrom(v int) func(*QueryRulesListRulesetsRequest) { + return func(r *QueryRulesListRulesetsRequest) { r.From = &v } } // WithSize - specifies a max number of results to get (default: 100). -func (f QueryRulesetList) WithSize(v int) func(*QueryRulesetListRequest) { - return func(r *QueryRulesetListRequest) { +func (f QueryRulesListRulesets) WithSize(v int) func(*QueryRulesListRulesetsRequest) { + return func(r *QueryRulesListRulesetsRequest) { r.Size = &v } } // WithPretty makes the response body pretty-printed. -func (f QueryRulesetList) WithPretty() func(*QueryRulesetListRequest) { - return func(r *QueryRulesetListRequest) { +func (f QueryRulesListRulesets) WithPretty() func(*QueryRulesListRulesetsRequest) { + return func(r *QueryRulesListRulesetsRequest) { r.Pretty = true } } // WithHuman makes statistical values human-readable. -func (f QueryRulesetList) WithHuman() func(*QueryRulesetListRequest) { - return func(r *QueryRulesetListRequest) { +func (f QueryRulesListRulesets) WithHuman() func(*QueryRulesListRulesetsRequest) { + return func(r *QueryRulesListRulesetsRequest) { r.Human = true } } // WithErrorTrace includes the stack trace for errors in the response body. -func (f QueryRulesetList) WithErrorTrace() func(*QueryRulesetListRequest) { - return func(r *QueryRulesetListRequest) { +func (f QueryRulesListRulesets) WithErrorTrace() func(*QueryRulesListRulesetsRequest) { + return func(r *QueryRulesListRulesetsRequest) { r.ErrorTrace = true } } // WithFilterPath filters the properties of the response body. 
-func (f QueryRulesetList) WithFilterPath(v ...string) func(*QueryRulesetListRequest) { - return func(r *QueryRulesetListRequest) { +func (f QueryRulesListRulesets) WithFilterPath(v ...string) func(*QueryRulesListRulesetsRequest) { + return func(r *QueryRulesListRulesetsRequest) { r.FilterPath = v } } // WithHeader adds the headers to the HTTP request. -func (f QueryRulesetList) WithHeader(h map[string]string) func(*QueryRulesetListRequest) { - return func(r *QueryRulesetListRequest) { +func (f QueryRulesListRulesets) WithHeader(h map[string]string) func(*QueryRulesListRulesetsRequest) { + return func(r *QueryRulesListRulesetsRequest) { if r.Header == nil { r.Header = make(http.Header) } @@ -233,8 +231,8 @@ func (f QueryRulesetList) WithHeader(h map[string]string) func(*QueryRulesetList } // WithOpaqueID adds the X-Opaque-Id header to the HTTP request. -func (f QueryRulesetList) WithOpaqueID(s string) func(*QueryRulesetListRequest) { - return func(r *QueryRulesetListRequest) { +func (f QueryRulesListRulesets) WithOpaqueID(s string) func(*QueryRulesListRulesetsRequest) { + return func(r *QueryRulesListRulesetsRequest) { if r.Header == nil { r.Header = make(http.Header) } diff --git a/esapi/api.query_rules.put_rule.go b/esapi/api.query_rules.put_rule.go new file mode 100644 index 0000000000..36b24e8208 --- /dev/null +++ b/esapi/api.query_rules.put_rule.go @@ -0,0 +1,241 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.15.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newQueryRulesPutRuleFunc(t Transport) QueryRulesPutRule { + return func(body io.Reader, rule_id string, ruleset_id string, o ...func(*QueryRulesPutRuleRequest)) (*Response, error) { + var r = QueryRulesPutRuleRequest{Body: body, RuleID: rule_id, RulesetID: ruleset_id} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// QueryRulesPutRule creates or updates a query rule within a ruleset. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/put-query-rule.html. +type QueryRulesPutRule func(body io.Reader, rule_id string, ruleset_id string, o ...func(*QueryRulesPutRuleRequest)) (*Response, error) + +// QueryRulesPutRuleRequest configures the Query Rules Put Rule API request. 
+type QueryRulesPutRuleRequest struct { + Body io.Reader + + RuleID string + RulesetID string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r QueryRulesPutRuleRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "query_rules.put_rule") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "PUT" + + path.Grow(7 + 1 + len("_query_rules") + 1 + len(r.RulesetID) + 1 + len("_rule") + 1 + len(r.RuleID)) + path.WriteString("http://") + path.WriteString("/") + path.WriteString("_query_rules") + path.WriteString("/") + path.WriteString(r.RulesetID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "ruleset_id", r.RulesetID) + } + path.WriteString("/") + path.WriteString("_rule") + path.WriteString("/") + path.WriteString(r.RuleID) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordPathPart(ctx, "rule_id", r.RuleID) + } + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "query_rules.put_rule") + if reader := instrument.RecordRequestBody(ctx, "query_rules.put_rule", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "query_rules.put_rule") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f QueryRulesPutRule) WithContext(v context.Context) func(*QueryRulesPutRuleRequest) { + return func(r *QueryRulesPutRuleRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f QueryRulesPutRule) WithPretty() func(*QueryRulesPutRuleRequest) { + return func(r *QueryRulesPutRuleRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. 
+func (f QueryRulesPutRule) WithHuman() func(*QueryRulesPutRuleRequest) { + return func(r *QueryRulesPutRuleRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f QueryRulesPutRule) WithErrorTrace() func(*QueryRulesPutRuleRequest) { + return func(r *QueryRulesPutRuleRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f QueryRulesPutRule) WithFilterPath(v ...string) func(*QueryRulesPutRuleRequest) { + return func(r *QueryRulesPutRuleRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f QueryRulesPutRule) WithHeader(h map[string]string) func(*QueryRulesPutRuleRequest) { + return func(r *QueryRulesPutRuleRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f QueryRulesPutRule) WithOpaqueID(s string) func(*QueryRulesPutRuleRequest) { + return func(r *QueryRulesPutRuleRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.query_ruleset.put.go b/esapi/api.query_rules.put_ruleset.go similarity index 68% rename from esapi/api.query_ruleset.put.go rename to esapi/api.query_rules.put_ruleset.go index 2c010ec77e..c33f5b9b85 100644 --- a/esapi/api.query_ruleset.put.go +++ b/esapi/api.query_rules.put_ruleset.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -26,9 +26,9 @@ import ( "strings" ) -func newQueryRulesetPutFunc(t Transport) QueryRulesetPut { - return func(body io.Reader, ruleset_id string, o ...func(*QueryRulesetPutRequest)) (*Response, error) { - var r = QueryRulesetPutRequest{Body: body, RulesetID: ruleset_id} +func newQueryRulesPutRulesetFunc(t Transport) QueryRulesPutRuleset { + return func(body io.Reader, ruleset_id string, o ...func(*QueryRulesPutRulesetRequest)) (*Response, error) { + var r = QueryRulesPutRulesetRequest{Body: body, RulesetID: ruleset_id} for _, f := range o { f(&r) } @@ -43,15 +43,13 @@ func newQueryRulesetPutFunc(t Transport) QueryRulesetPut { // ----- API Definition ------------------------------------------------------- -// QueryRulesetPut creates or updates a query ruleset. -// -// This API is experimental. +// QueryRulesPutRuleset creates or updates a query ruleset. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/put-query-ruleset.html. -type QueryRulesetPut func(body io.Reader, ruleset_id string, o ...func(*QueryRulesetPutRequest)) (*Response, error) +type QueryRulesPutRuleset func(body io.Reader, ruleset_id string, o ...func(*QueryRulesPutRulesetRequest)) (*Response, error) -// QueryRulesetPutRequest configures the Query Ruleset Put API request. -type QueryRulesetPutRequest struct { +// QueryRulesPutRulesetRequest configures the Query Rules Put Ruleset API request. +type QueryRulesPutRulesetRequest struct { Body io.Reader RulesetID string @@ -69,7 +67,7 @@ type QueryRulesetPutRequest struct { } // Do executes the request and returns response or error. 
-func (r QueryRulesetPutRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { +func (r QueryRulesPutRulesetRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { var ( method string path strings.Builder @@ -78,7 +76,7 @@ func (r QueryRulesetPutRequest) Do(providedCtx context.Context, transport Transp ) if instrument, ok := r.instrument.(Instrumentation); ok { - ctx = instrument.Start(providedCtx, "query_ruleset.put") + ctx = instrument.Start(providedCtx, "query_rules.put_ruleset") defer instrument.Close(ctx) } if ctx == nil { @@ -152,14 +150,14 @@ func (r QueryRulesetPutRequest) Do(providedCtx context.Context, transport Transp } if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.BeforeRequest(req, "query_ruleset.put") - if reader := instrument.RecordRequestBody(ctx, "query_ruleset.put", r.Body); reader != nil { + instrument.BeforeRequest(req, "query_rules.put_ruleset") + if reader := instrument.RecordRequestBody(ctx, "query_rules.put_ruleset", r.Body); reader != nil { req.Body = reader } } res, err := transport.Perform(req) if instrument, ok := r.instrument.(Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "query_ruleset.put") + instrument.AfterRequest(req, "elasticsearch", "query_rules.put_ruleset") } if err != nil { if instrument, ok := r.instrument.(Instrumentation); ok { @@ -178,43 +176,43 @@ func (r QueryRulesetPutRequest) Do(providedCtx context.Context, transport Transp } // WithContext sets the request context. -func (f QueryRulesetPut) WithContext(v context.Context) func(*QueryRulesetPutRequest) { - return func(r *QueryRulesetPutRequest) { +func (f QueryRulesPutRuleset) WithContext(v context.Context) func(*QueryRulesPutRulesetRequest) { + return func(r *QueryRulesPutRulesetRequest) { r.ctx = v } } // WithPretty makes the response body pretty-printed. -func (f QueryRulesetPut) WithPretty() func(*QueryRulesetPutRequest) { - return func(r *QueryRulesetPutRequest) { +func (f QueryRulesPutRuleset) WithPretty() func(*QueryRulesPutRulesetRequest) { + return func(r *QueryRulesPutRulesetRequest) { r.Pretty = true } } // WithHuman makes statistical values human-readable. -func (f QueryRulesetPut) WithHuman() func(*QueryRulesetPutRequest) { - return func(r *QueryRulesetPutRequest) { +func (f QueryRulesPutRuleset) WithHuman() func(*QueryRulesPutRulesetRequest) { + return func(r *QueryRulesPutRulesetRequest) { r.Human = true } } // WithErrorTrace includes the stack trace for errors in the response body. -func (f QueryRulesetPut) WithErrorTrace() func(*QueryRulesetPutRequest) { - return func(r *QueryRulesetPutRequest) { +func (f QueryRulesPutRuleset) WithErrorTrace() func(*QueryRulesPutRulesetRequest) { + return func(r *QueryRulesPutRulesetRequest) { r.ErrorTrace = true } } // WithFilterPath filters the properties of the response body. -func (f QueryRulesetPut) WithFilterPath(v ...string) func(*QueryRulesetPutRequest) { - return func(r *QueryRulesetPutRequest) { +func (f QueryRulesPutRuleset) WithFilterPath(v ...string) func(*QueryRulesPutRulesetRequest) { + return func(r *QueryRulesPutRulesetRequest) { r.FilterPath = v } } // WithHeader adds the headers to the HTTP request. 
-func (f QueryRulesetPut) WithHeader(h map[string]string) func(*QueryRulesetPutRequest) { - return func(r *QueryRulesetPutRequest) { +func (f QueryRulesPutRuleset) WithHeader(h map[string]string) func(*QueryRulesPutRulesetRequest) { + return func(r *QueryRulesPutRulesetRequest) { if r.Header == nil { r.Header = make(http.Header) } @@ -225,8 +223,8 @@ func (f QueryRulesetPut) WithHeader(h map[string]string) func(*QueryRulesetPutRe } // WithOpaqueID adds the X-Opaque-Id header to the HTTP request. -func (f QueryRulesetPut) WithOpaqueID(s string) func(*QueryRulesetPutRequest) { - return func(r *QueryRulesetPutRequest) { +func (f QueryRulesPutRuleset) WithOpaqueID(s string) func(*QueryRulesPutRulesetRequest) { + return func(r *QueryRulesPutRulesetRequest) { if r.Header == nil { r.Header = make(http.Header) } diff --git a/esapi/api.rank_eval.go b/esapi/api.rank_eval.go index aa71fdd0bd..8692870a11 100644 --- a/esapi/api.rank_eval.go +++ b/esapi/api.rank_eval.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.reindex.go b/esapi/api.reindex.go index ca485df69d..cffffdea21 100644 --- a/esapi/api.reindex.go +++ b/esapi/api.reindex.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.reindex_rethrottle.go b/esapi/api.reindex_rethrottle.go index bedc4e7fd7..dc05029812 100644 --- a/esapi/api.reindex_rethrottle.go +++ b/esapi/api.reindex_rethrottle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.render_search_template.go b/esapi/api.render_search_template.go index 5265423ed3..68a4c906b9 100644 --- a/esapi/api.render_search_template.go +++ b/esapi/api.render_search_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.scripts_painless_execute.go b/esapi/api.scripts_painless_execute.go index 0002f43381..51564d530d 100644 --- a/esapi/api.scripts_painless_execute.go +++ b/esapi/api.scripts_painless_execute.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.scroll.go b/esapi/api.scroll.go index fcf998a2d8..e49674ea4b 100644 --- a/esapi/api.scroll.go +++ b/esapi/api.scroll.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
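The query_ruleset.* endpoints above are renamed to query_rules.*, and rule-level operations (put_rule, get_rule, delete_rule) are added alongside the ruleset-level ones. A minimal usage sketch of the renamed surface, assuming a default client from github.com/elastic/go-elasticsearch/v8; the ruleset ID, rule ID, and rule body below are illustrative, not part of this diff:

package main

import (
	"log"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("client error: %s", err)
	}

	// Create or update a single rule inside a ruleset (body, rule_id, ruleset_id).
	rule := `{"type":"pinned","criteria":[{"type":"exact","metadata":"user_query","values":["pugs"]}],"actions":{"ids":["doc-1"]}}`
	res, err := es.QueryRulesPutRule(strings.NewReader(rule), "rule-1", "my-ruleset")
	if err != nil {
		log.Fatalf("put rule: %s", err)
	}
	res.Body.Close()

	// Fetch and then delete the same rule (rule_id, ruleset_id).
	if res, err = es.QueryRulesGetRule("rule-1", "my-ruleset"); err == nil {
		res.Body.Close()
	}
	if res, err = es.QueryRulesDeleteRule("rule-1", "my-ruleset"); err == nil {
		res.Body.Close()
	}

	// List rulesets, capping the page size via the functional option.
	res, err = es.QueryRulesListRulesets(es.QueryRulesListRulesets.WithSize(20))
	if err != nil {
		log.Fatalf("list rulesets: %s", err)
	}
	defer res.Body.Close()
}

Note that the put_rule argument order follows the generated signature: body first, then rule_id, then ruleset_id.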
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search.go b/esapi/api.search.go index cd4ba2d7a2..0a77bc7bec 100644 --- a/esapi/api.search.go +++ b/esapi/api.search.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.delete.go b/esapi/api.search_application.delete.go index b0c35dd4eb..db519d17a5 100644 --- a/esapi/api.search_application.delete.go +++ b/esapi/api.search_application.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.delete_behavioral_analytics.go b/esapi/api.search_application.delete_behavioral_analytics.go index 6a4ade475c..321123a287 100644 --- a/esapi/api.search_application.delete_behavioral_analytics.go +++ b/esapi/api.search_application.delete_behavioral_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.get.go b/esapi/api.search_application.get.go index 85a7ebf7d4..b0ef8a8202 100644 --- a/esapi/api.search_application.get.go +++ b/esapi/api.search_application.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.get_behavioral_analytics.go b/esapi/api.search_application.get_behavioral_analytics.go index 1d2e41fac1..e3e22935ae 100644 --- a/esapi/api.search_application.get_behavioral_analytics.go +++ b/esapi/api.search_application.get_behavioral_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.list.go b/esapi/api.search_application.list.go index 0ce836b58c..9cb88753d2 100644 --- a/esapi/api.search_application.list.go +++ b/esapi/api.search_application.list.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.post_behavioral_analytics_event.go b/esapi/api.search_application.post_behavioral_analytics_event.go index 04fc1a7f9b..8e64f9db10 100644 --- a/esapi/api.search_application.post_behavioral_analytics_event.go +++ b/esapi/api.search_application.post_behavioral_analytics_event.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.put.go b/esapi/api.search_application.put.go index 10e3bced48..a65b771723 100644 --- a/esapi/api.search_application.put.go +++ b/esapi/api.search_application.put.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.put_behavioral_analytics.go b/esapi/api.search_application.put_behavioral_analytics.go index f2eb52558d..d826175fca 100644 --- a/esapi/api.search_application.put_behavioral_analytics.go +++ b/esapi/api.search_application.put_behavioral_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.render_query.go b/esapi/api.search_application.render_query.go index 617090526e..83eee21117 100644 --- a/esapi/api.search_application.render_query.go +++ b/esapi/api.search_application.render_query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_application.search.go b/esapi/api.search_application.search.go index 0b9564e01b..309ed738f0 100644 --- a/esapi/api.search_application.search.go +++ b/esapi/api.search_application.search.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "io" "net/http" + "strconv" "strings" ) @@ -56,6 +57,8 @@ type SearchApplicationSearchRequest struct { Name string + TypedKeys *bool + Pretty bool Human bool ErrorTrace bool @@ -103,6 +106,10 @@ func (r SearchApplicationSearchRequest) Do(providedCtx context.Context, transpor params = make(map[string]string) + if r.TypedKeys != nil { + params["typed_keys"] = strconv.FormatBool(*r.TypedKeys) + } + if r.Pretty { params["pretty"] = "true" } @@ -195,6 +202,13 @@ func (f SearchApplicationSearch) WithBody(v io.Reader) func(*SearchApplicationSe } } +// WithTypedKeys - specify whether aggregation and suggester names should be prefixed by their respective types in the response. +func (f SearchApplicationSearch) WithTypedKeys(v bool) func(*SearchApplicationSearchRequest) { + return func(r *SearchApplicationSearchRequest) { + r.TypedKeys = &v + } +} + // WithPretty makes the response body pretty-printed. func (f SearchApplicationSearch) WithPretty() func(*SearchApplicationSearchRequest) { return func(r *SearchApplicationSearchRequest) { diff --git a/esapi/api.search_mvt.go b/esapi/api.search_mvt.go index b29275d930..475520a77d 100644 --- a/esapi/api.search_mvt.go +++ b/esapi/api.search_mvt.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
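The search_application.search hunk above adds a typed_keys flag (TypedKeys *bool plus WithTypedKeys). A sketch using the request-struct form, assuming the es client and context/log/strings imports from the earlier sketch plus github.com/elastic/go-elasticsearch/v8/esapi; the application name and params body are hypothetical:

typed := true
res, err := esapi.SearchApplicationSearchRequest{
	Name:      "my-search-app",                                         // hypothetical search application name
	Body:      strings.NewReader(`{"params":{"query_string":"pugs"}}`), // hypothetical template parameters
	TypedKeys: &typed,                                                  // added here: prefix aggregation/suggester names with their types
}.Do(context.Background(), es) // es satisfies esapi.Transport
if err != nil {
	log.Fatalf("search_application.search: %s", err)
}
defer res.Body.Close()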
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_shards.go b/esapi/api.search_shards.go index 7dd05d79bf..dd2aacf4a6 100644 --- a/esapi/api.search_shards.go +++ b/esapi/api.search_shards.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.search_template.go b/esapi/api.search_template.go index 269583b84b..49996fc2e9 100644 --- a/esapi/api.search_template.go +++ b/esapi/api.search_template.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.shutdown.delete_node.go b/esapi/api.shutdown.delete_node.go index 94db148f13..0b69d626ef 100644 --- a/esapi/api.shutdown.delete_node.go +++ b/esapi/api.shutdown.delete_node.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.shutdown.get_node.go b/esapi/api.shutdown.get_node.go index a8ea5cd4ec..bd309083ee 100644 --- a/esapi/api.shutdown.get_node.go +++ b/esapi/api.shutdown.get_node.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newShutdownGetNodeFunc(t Transport) ShutdownGetNode { @@ -51,6 +52,8 @@ type ShutdownGetNode func(o ...func(*ShutdownGetNodeRequest)) (*Response, error) type ShutdownGetNodeRequest struct { NodeID string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -98,6 +101,10 @@ func (r ShutdownGetNodeRequest) Do(providedCtx context.Context, transport Transp params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -183,6 +190,13 @@ func (f ShutdownGetNode) WithNodeID(v string) func(*ShutdownGetNodeRequest) { } } +// WithMasterTimeout - timeout for processing on master node. +func (f ShutdownGetNode) WithMasterTimeout(v time.Duration) func(*ShutdownGetNodeRequest) { + return func(r *ShutdownGetNodeRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f ShutdownGetNode) WithPretty() func(*ShutdownGetNodeRequest) { return func(r *ShutdownGetNodeRequest) { diff --git a/esapi/api.shutdown.put_node.go b/esapi/api.shutdown.put_node.go index 87f6296588..c41da842df 100644 --- a/esapi/api.shutdown.put_node.go +++ b/esapi/api.shutdown.put_node.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
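The shutdown.get_node hunk above adds a MasterTimeout (master_timeout) parameter. A sketch with the request struct, assuming the same client and an additional time import; the node ID is hypothetical:

res, err := esapi.ShutdownGetNodeRequest{
	NodeID:        "node-1",         // hypothetical node ID
	MasterTimeout: 30 * time.Second, // added here: bound the wait for processing on the master node
}.Do(context.Background(), es)
if err != nil {
	log.Fatalf("shutdown.get_node: %s", err)
}
defer res.Body.Close()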
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.simulate.ingest.go b/esapi/api.simulate.ingest.go index f0a3c8d417..87c285998c 100644 --- a/esapi/api.simulate.ingest.go +++ b/esapi/api.simulate.ingest.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.cleanup_repository.go b/esapi/api.snapshot.cleanup_repository.go index 6e99023b90..82540c4a1a 100644 --- a/esapi/api.snapshot.cleanup_repository.go +++ b/esapi/api.snapshot.cleanup_repository.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.clone.go b/esapi/api.snapshot.clone.go index cf21219af1..f063d68c0a 100644 --- a/esapi/api.snapshot.clone.go +++ b/esapi/api.snapshot.clone.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.create.go b/esapi/api.snapshot.create.go index 3082947b38..79976a0122 100644 --- a/esapi/api.snapshot.create.go +++ b/esapi/api.snapshot.create.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.create_repository.go b/esapi/api.snapshot.create_repository.go index 295bc6b545..af62049817 100644 --- a/esapi/api.snapshot.create_repository.go +++ b/esapi/api.snapshot.create_repository.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.delete.go b/esapi/api.snapshot.delete.go index 58f4b2735c..f8f19f3905 100644 --- a/esapi/api.snapshot.delete.go +++ b/esapi/api.snapshot.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "errors" "net/http" + "strconv" "strings" "time" ) @@ -54,7 +55,8 @@ type SnapshotDeleteRequest struct { Repository string Snapshot []string - MasterTimeout time.Duration + MasterTimeout time.Duration + WaitForCompletion *bool Pretty bool Human bool @@ -112,6 +114,10 @@ func (r SnapshotDeleteRequest) Do(providedCtx context.Context, transport Transpo params["master_timeout"] = formatDuration(r.MasterTimeout) } + if r.WaitForCompletion != nil { + params["wait_for_completion"] = strconv.FormatBool(*r.WaitForCompletion) + } + if r.Pretty { params["pretty"] = "true" } @@ -197,6 +203,13 @@ func (f SnapshotDelete) WithMasterTimeout(v time.Duration) func(*SnapshotDeleteR } } +// WithWaitForCompletion - should this request wait until the operation has completed before returning. +func (f SnapshotDelete) WithWaitForCompletion(v bool) func(*SnapshotDeleteRequest) { + return func(r *SnapshotDeleteRequest) { + r.WaitForCompletion = &v + } +} + // WithPretty makes the response body pretty-printed. func (f SnapshotDelete) WithPretty() func(*SnapshotDeleteRequest) { return func(r *SnapshotDeleteRequest) { diff --git a/esapi/api.snapshot.delete_repository.go b/esapi/api.snapshot.delete_repository.go index 1c932ec1eb..32fee65d76 100644 --- a/esapi/api.snapshot.delete_repository.go +++ b/esapi/api.snapshot.delete_repository.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.get.go b/esapi/api.snapshot.get.go index 23db446462..a7a1bd1919 100644 --- a/esapi/api.snapshot.get.go +++ b/esapi/api.snapshot.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.get_repository.go b/esapi/api.snapshot.get_repository.go index 82c4ed6372..3bb23496ba 100644 --- a/esapi/api.snapshot.get_repository.go +++ b/esapi/api.snapshot.get_repository.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.repository_analyze.go b/esapi/api.snapshot.repository_analyze.go index 79d876095c..00e303db0f 100644 --- a/esapi/api.snapshot.repository_analyze.go +++ b/esapi/api.snapshot.repository_analyze.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.restore.go b/esapi/api.snapshot.restore.go index c402746ed7..5091c9e3eb 100644 --- a/esapi/api.snapshot.restore.go +++ b/esapi/api.snapshot.restore.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
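The snapshot.delete hunk above adds WaitForCompletion (wait_for_completion). A sketch with the request struct, assuming the same client; the repository and snapshot names are hypothetical:

wait := true
res, err := esapi.SnapshotDeleteRequest{
	Repository:        "my-repository",         // hypothetical repository name
	Snapshot:          []string{"snapshot-1"},  // hypothetical snapshot name(s)
	WaitForCompletion: &wait,                   // added here: block until the deletion has finished
}.Do(context.Background(), es)
if err != nil {
	log.Fatalf("snapshot.delete: %s", err)
}
defer res.Body.Close()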
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.status.go b/esapi/api.snapshot.status.go index 0844416758..220d54c5dc 100644 --- a/esapi/api.snapshot.status.go +++ b/esapi/api.snapshot.status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.snapshot.verify_repository.go b/esapi/api.snapshot.verify_repository.go index 595ba27141..dd0303b799 100644 --- a/esapi/api.snapshot.verify_repository.go +++ b/esapi/api.snapshot.verify_repository.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.synonyms.delete_synonym.go b/esapi/api.synonyms.delete_synonym.go index 828aeb1392..4d2bcc0340 100644 --- a/esapi/api.synonyms.delete_synonym.go +++ b/esapi/api.synonyms.delete_synonym.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.synonyms.delete_synonym_rule.go b/esapi/api.synonyms.delete_synonym_rule.go index 0eff28028c..082f8fa2da 100644 --- a/esapi/api.synonyms.delete_synonym_rule.go +++ b/esapi/api.synonyms.delete_synonym_rule.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.synonyms.get_synonym.go b/esapi/api.synonyms.get_synonym.go index 2bb42923f3..355696719f 100644 --- a/esapi/api.synonyms.get_synonym.go +++ b/esapi/api.synonyms.get_synonym.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.synonyms.get_synonym_rule.go b/esapi/api.synonyms.get_synonym_rule.go index 2fce0e4716..c078771f76 100644 --- a/esapi/api.synonyms.get_synonym_rule.go +++ b/esapi/api.synonyms.get_synonym_rule.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.synonyms.get_synonyms_sets.go b/esapi/api.synonyms.get_synonyms_sets.go index eba33df1ee..3074bd0d77 100644 --- a/esapi/api.synonyms.get_synonyms_sets.go +++ b/esapi/api.synonyms.get_synonyms_sets.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.synonyms.put_synonym.go b/esapi/api.synonyms.put_synonym.go index 837b988544..0d8b2becbd 100644 --- a/esapi/api.synonyms.put_synonym.go +++ b/esapi/api.synonyms.put_synonym.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.synonyms.put_synonym_rule.go b/esapi/api.synonyms.put_synonym_rule.go index bdea3bf69b..d9426ef807 100644 --- a/esapi/api.synonyms.put_synonym_rule.go +++ b/esapi/api.synonyms.put_synonym_rule.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.tasks.cancel.go b/esapi/api.tasks.cancel.go index 002c42f3db..4553445ada 100644 --- a/esapi/api.tasks.cancel.go +++ b/esapi/api.tasks.cancel.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.tasks.get.go b/esapi/api.tasks.get.go index 9e2a12f406..cda9add11a 100644 --- a/esapi/api.tasks.get.go +++ b/esapi/api.tasks.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.tasks.list.go b/esapi/api.tasks.list.go index 6a2da2bfe4..e0cde90ede 100644 --- a/esapi/api.tasks.list.go +++ b/esapi/api.tasks.list.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.terms_enum.go b/esapi/api.terms_enum.go index 3d24f409fd..0704984d4e 100644 --- a/esapi/api.terms_enum.go +++ b/esapi/api.terms_enum.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.termvectors.go b/esapi/api.termvectors.go index 221c085706..cef3cfca06 100644 --- a/esapi/api.termvectors.go +++ b/esapi/api.termvectors.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.update.go b/esapi/api.update.go index f1a52fa368..bd404a372e 100644 --- a/esapi/api.update.go +++ b/esapi/api.update.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.update_by_query.go b/esapi/api.update_by_query.go index 5eab11fc10..93666edd0c 100644 --- a/esapi/api.update_by_query.go +++ b/esapi/api.update_by_query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.update_by_query_rethrottle.go b/esapi/api.update_by_query_rethrottle.go index b252953860..01d3d9f984 100644 --- a/esapi/api.update_by_query_rethrottle.go +++ b/esapi/api.update_by_query_rethrottle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.async_search.delete.go b/esapi/api.xpack.async_search.delete.go index 61727ee0e0..d55305ac6c 100644 --- a/esapi/api.xpack.async_search.delete.go +++ b/esapi/api.xpack.async_search.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.async_search.get.go b/esapi/api.xpack.async_search.get.go index 40db490bd7..bf474b704e 100644 --- a/esapi/api.xpack.async_search.get.go +++ b/esapi/api.xpack.async_search.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.async_search.status.go b/esapi/api.xpack.async_search.status.go index 2c3c03229a..1b229475e1 100644 --- a/esapi/api.xpack.async_search.status.go +++ b/esapi/api.xpack.async_search.status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.async_search.submit.go b/esapi/api.xpack.async_search.submit.go index a97f56e5be..204669b8dc 100644 --- a/esapi/api.xpack.async_search.submit.go +++ b/esapi/api.xpack.async_search.submit.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.autoscaling.delete_autoscaling_policy.go b/esapi/api.xpack.autoscaling.delete_autoscaling_policy.go index 80e1a9da1b..629269060c 100644 --- a/esapi/api.xpack.autoscaling.delete_autoscaling_policy.go +++ b/esapi/api.xpack.autoscaling.delete_autoscaling_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newAutoscalingDeleteAutoscalingPolicyFunc(t Transport) AutoscalingDeleteAutoscalingPolicy { @@ -51,6 +52,9 @@ type AutoscalingDeleteAutoscalingPolicy func(name string, o ...func(*Autoscaling type AutoscalingDeleteAutoscalingPolicyRequest struct { Name string + MasterTimeout time.Duration + Timeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -96,6 +100,14 @@ func (r AutoscalingDeleteAutoscalingPolicyRequest) Do(providedCtx context.Contex params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -174,6 +186,20 @@ func (f AutoscalingDeleteAutoscalingPolicy) WithContext(v context.Context) func( } } +// WithMasterTimeout - timeout for processing on master node. +func (f AutoscalingDeleteAutoscalingPolicy) WithMasterTimeout(v time.Duration) func(*AutoscalingDeleteAutoscalingPolicyRequest) { + return func(r *AutoscalingDeleteAutoscalingPolicyRequest) { + r.MasterTimeout = v + } +} + +// WithTimeout - timeout for acknowledgement of update from all nodes in cluster. +func (f AutoscalingDeleteAutoscalingPolicy) WithTimeout(v time.Duration) func(*AutoscalingDeleteAutoscalingPolicyRequest) { + return func(r *AutoscalingDeleteAutoscalingPolicyRequest) { + r.Timeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f AutoscalingDeleteAutoscalingPolicy) WithPretty() func(*AutoscalingDeleteAutoscalingPolicyRequest) { return func(r *AutoscalingDeleteAutoscalingPolicyRequest) { diff --git a/esapi/api.xpack.autoscaling.get_autoscaling_capacity.go b/esapi/api.xpack.autoscaling.get_autoscaling_capacity.go index 55af74010d..b419c36cfd 100644 --- a/esapi/api.xpack.autoscaling.get_autoscaling_capacity.go +++ b/esapi/api.xpack.autoscaling.get_autoscaling_capacity.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newAutoscalingGetAutoscalingCapacityFunc(t Transport) AutoscalingGetAutoscalingCapacity { @@ -49,6 +50,8 @@ type AutoscalingGetAutoscalingCapacity func(o ...func(*AutoscalingGetAutoscaling // AutoscalingGetAutoscalingCapacityRequest configures the Autoscaling Get Autoscaling Capacity API request. type AutoscalingGetAutoscalingCapacityRequest struct { + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -86,6 +89,10 @@ func (r AutoscalingGetAutoscalingCapacityRequest) Do(providedCtx context.Context params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -164,6 +171,13 @@ func (f AutoscalingGetAutoscalingCapacity) WithContext(v context.Context) func(* } } +// WithMasterTimeout - timeout for processing on master node. 
+func (f AutoscalingGetAutoscalingCapacity) WithMasterTimeout(v time.Duration) func(*AutoscalingGetAutoscalingCapacityRequest) { + return func(r *AutoscalingGetAutoscalingCapacityRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f AutoscalingGetAutoscalingCapacity) WithPretty() func(*AutoscalingGetAutoscalingCapacityRequest) { return func(r *AutoscalingGetAutoscalingCapacityRequest) { diff --git a/esapi/api.xpack.autoscaling.get_autoscaling_policy.go b/esapi/api.xpack.autoscaling.get_autoscaling_policy.go index b287d76cd5..b68ea1babd 100644 --- a/esapi/api.xpack.autoscaling.get_autoscaling_policy.go +++ b/esapi/api.xpack.autoscaling.get_autoscaling_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newAutoscalingGetAutoscalingPolicyFunc(t Transport) AutoscalingGetAutoscalingPolicy { @@ -51,6 +52,8 @@ type AutoscalingGetAutoscalingPolicy func(name string, o ...func(*AutoscalingGet type AutoscalingGetAutoscalingPolicyRequest struct { Name string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -96,6 +99,10 @@ func (r AutoscalingGetAutoscalingPolicyRequest) Do(providedCtx context.Context, params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -174,6 +181,13 @@ func (f AutoscalingGetAutoscalingPolicy) WithContext(v context.Context) func(*Au } } +// WithMasterTimeout - timeout for processing on master node. +func (f AutoscalingGetAutoscalingPolicy) WithMasterTimeout(v time.Duration) func(*AutoscalingGetAutoscalingPolicyRequest) { + return func(r *AutoscalingGetAutoscalingPolicyRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f AutoscalingGetAutoscalingPolicy) WithPretty() func(*AutoscalingGetAutoscalingPolicyRequest) { return func(r *AutoscalingGetAutoscalingPolicyRequest) { diff --git a/esapi/api.xpack.autoscaling.put_autoscaling_policy.go b/esapi/api.xpack.autoscaling.put_autoscaling_policy.go index cd8cb4159b..a26b3dcefa 100644 --- a/esapi/api.xpack.autoscaling.put_autoscaling_policy.go +++ b/esapi/api.xpack.autoscaling.put_autoscaling_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
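For the autoscaling read APIs the new option is WithMasterTimeout. A short sketch against the capacity endpoint, which takes no required arguments; the 30-second value is illustrative and the flat AutoscalingGetAutoscalingCapacity accessor follows the API struct layout shown earlier in this diff:

package main

import (
	"log"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// Ask for the autoscaling capacity, bounding how long the request may wait
	// for the master node with the newly added master_timeout parameter.
	res, err := es.AutoscalingGetAutoscalingCapacity(
		es.AutoscalingGetAutoscalingCapacity.WithMasterTimeout(30 * time.Second),
	)
	if err != nil {
		log.Fatalf("error getting autoscaling capacity: %s", err)
	}
	defer res.Body.Close()

	log.Println(res.Status())
}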
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "io" "net/http" "strings" + "time" ) func newAutoscalingPutAutoscalingPolicyFunc(t Transport) AutoscalingPutAutoscalingPolicy { @@ -54,6 +55,9 @@ type AutoscalingPutAutoscalingPolicyRequest struct { Name string + MasterTimeout time.Duration + Timeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -99,6 +103,14 @@ func (r AutoscalingPutAutoscalingPolicyRequest) Do(providedCtx context.Context, params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -184,6 +196,20 @@ func (f AutoscalingPutAutoscalingPolicy) WithContext(v context.Context) func(*Au } } +// WithMasterTimeout - timeout for processing on master node. +func (f AutoscalingPutAutoscalingPolicy) WithMasterTimeout(v time.Duration) func(*AutoscalingPutAutoscalingPolicyRequest) { + return func(r *AutoscalingPutAutoscalingPolicyRequest) { + r.MasterTimeout = v + } +} + +// WithTimeout - timeout for acknowledgement of update from all nodes in cluster. +func (f AutoscalingPutAutoscalingPolicy) WithTimeout(v time.Duration) func(*AutoscalingPutAutoscalingPolicyRequest) { + return func(r *AutoscalingPutAutoscalingPolicyRequest) { + r.Timeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f AutoscalingPutAutoscalingPolicy) WithPretty() func(*AutoscalingPutAutoscalingPolicyRequest) { return func(r *AutoscalingPutAutoscalingPolicyRequest) { diff --git a/esapi/api.xpack.cat.ml_data_frame_analytics.go b/esapi/api.xpack.cat.ml_data_frame_analytics.go index 752e0bac6b..ae4d45915e 100644 --- a/esapi/api.xpack.cat.ml_data_frame_analytics.go +++ b/esapi/api.xpack.cat.ml_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.cat.ml_datafeeds.go b/esapi/api.xpack.cat.ml_datafeeds.go index de8dae78a3..386b5a8fbc 100644 --- a/esapi/api.xpack.cat.ml_datafeeds.go +++ b/esapi/api.xpack.cat.ml_datafeeds.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.cat.ml_jobs.go b/esapi/api.xpack.cat.ml_jobs.go index b6703af90b..d065190da9 100644 --- a/esapi/api.xpack.cat.ml_jobs.go +++ b/esapi/api.xpack.cat.ml_jobs.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.cat.ml_trained_models.go b/esapi/api.xpack.cat.ml_trained_models.go index 411ca5bd7d..f1e0bd7a01 100644 --- a/esapi/api.xpack.cat.ml_trained_models.go +++ b/esapi/api.xpack.cat.ml_trained_models.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
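The put-policy counterpart gains both master_timeout and timeout. Because the generated request structs can also be used directly, here is a sketch using that lower-level pattern; the policy name and body are made up, and the request's Do method accepts the client as its transport:

package main

import (
	"context"
	"log"
	"strings"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// Create or update a hypothetical autoscaling policy, passing the two
	// timeout parameters added in this change via the request struct fields.
	req := esapi.AutoscalingPutAutoscalingPolicyRequest{
		Name:          "my-autoscaling-policy",
		Body:          strings.NewReader(`{"roles":["data_hot"],"deciders":{}}`),
		MasterTimeout: 30 * time.Second,
		Timeout:       30 * time.Second,
	}

	res, err := req.Do(context.Background(), es)
	if err != nil {
		log.Fatalf("error putting autoscaling policy: %s", err)
	}
	defer res.Body.Close()

	log.Println(res.Status())
}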
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.cat.transforms.go b/esapi/api.xpack.cat.transforms.go index 1d43f6ad18..1d6338d2d3 100644 --- a/esapi/api.xpack.cat.transforms.go +++ b/esapi/api.xpack.cat.transforms.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ccr.delete_auto_follow_pattern.go b/esapi/api.xpack.ccr.delete_auto_follow_pattern.go index edcbdb39e8..9c0b0d21c8 100644 --- a/esapi/api.xpack.ccr.delete_auto_follow_pattern.go +++ b/esapi/api.xpack.ccr.delete_auto_follow_pattern.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newCCRDeleteAutoFollowPatternFunc(t Transport) CCRDeleteAutoFollowPattern { @@ -51,6 +52,8 @@ type CCRDeleteAutoFollowPattern func(name string, o ...func(*CCRDeleteAutoFollow type CCRDeleteAutoFollowPatternRequest struct { Name string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -96,6 +99,10 @@ func (r CCRDeleteAutoFollowPatternRequest) Do(providedCtx context.Context, trans params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -174,6 +181,13 @@ func (f CCRDeleteAutoFollowPattern) WithContext(v context.Context) func(*CCRDele } } +// WithMasterTimeout - explicit operation timeout for connection to master node. +func (f CCRDeleteAutoFollowPattern) WithMasterTimeout(v time.Duration) func(*CCRDeleteAutoFollowPatternRequest) { + return func(r *CCRDeleteAutoFollowPatternRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f CCRDeleteAutoFollowPattern) WithPretty() func(*CCRDeleteAutoFollowPatternRequest) { return func(r *CCRDeleteAutoFollowPatternRequest) { diff --git a/esapi/api.xpack.ccr.follow.go b/esapi/api.xpack.ccr.follow.go index 96833d2417..f7ec7dc003 100644 --- a/esapi/api.xpack.ccr.follow.go +++ b/esapi/api.xpack.ccr.follow.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "io" "net/http" "strings" + "time" ) func newCCRFollowFunc(t Transport) CCRFollow { @@ -54,6 +55,7 @@ type CCRFollowRequest struct { Body io.Reader + MasterTimeout time.Duration WaitForActiveShards string Pretty bool @@ -101,6 +103,10 @@ func (r CCRFollowRequest) Do(providedCtx context.Context, transport Transport) ( params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.WaitForActiveShards != "" { params["wait_for_active_shards"] = r.WaitForActiveShards } @@ -190,6 +196,13 @@ func (f CCRFollow) WithContext(v context.Context) func(*CCRFollowRequest) { } } +// WithMasterTimeout - explicit operation timeout for connection to master node. 
+func (f CCRFollow) WithMasterTimeout(v time.Duration) func(*CCRFollowRequest) { + return func(r *CCRFollowRequest) { + r.MasterTimeout = v + } +} + // WithWaitForActiveShards - sets the number of shard copies that must be active before returning. defaults to 0. set to `all` for all shard copies, otherwise set to any non-negative value less than or equal to the total number of copies for the shard (number of replicas + 1). func (f CCRFollow) WithWaitForActiveShards(v string) func(*CCRFollowRequest) { return func(r *CCRFollowRequest) { diff --git a/esapi/api.xpack.ccr.follow_info.go b/esapi/api.xpack.ccr.follow_info.go index 8b6d4d71a1..cd2eaaca96 100644 --- a/esapi/api.xpack.ccr.follow_info.go +++ b/esapi/api.xpack.ccr.follow_info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "errors" "net/http" "strings" + "time" ) func newCCRFollowInfoFunc(t Transport) CCRFollowInfo { @@ -52,6 +53,8 @@ type CCRFollowInfo func(index []string, o ...func(*CCRFollowInfoRequest)) (*Resp type CCRFollowInfoRequest struct { Index []string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -101,6 +104,10 @@ func (r CCRFollowInfoRequest) Do(providedCtx context.Context, transport Transpor params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -179,6 +186,13 @@ func (f CCRFollowInfo) WithContext(v context.Context) func(*CCRFollowInfoRequest } } +// WithMasterTimeout - explicit operation timeout for connection to master node. +func (f CCRFollowInfo) WithMasterTimeout(v time.Duration) func(*CCRFollowInfoRequest) { + return func(r *CCRFollowInfoRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f CCRFollowInfo) WithPretty() func(*CCRFollowInfoRequest) { return func(r *CCRFollowInfoRequest) { diff --git a/esapi/api.xpack.ccr.follow_stats.go b/esapi/api.xpack.ccr.follow_stats.go index 469e8b3d6e..11865018fc 100644 --- a/esapi/api.xpack.ccr.follow_stats.go +++ b/esapi/api.xpack.ccr.follow_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "errors" "net/http" "strings" + "time" ) func newCCRFollowStatsFunc(t Transport) CCRFollowStats { @@ -52,6 +53,8 @@ type CCRFollowStats func(index []string, o ...func(*CCRFollowStatsRequest)) (*Re type CCRFollowStatsRequest struct { Index []string + Timeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -101,6 +104,10 @@ func (r CCRFollowStatsRequest) Do(providedCtx context.Context, transport Transpo params = make(map[string]string) + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -179,6 +186,13 @@ func (f CCRFollowStats) WithContext(v context.Context) func(*CCRFollowStatsReque } } +// WithTimeout - explicit operation timeout. +func (f CCRFollowStats) WithTimeout(v time.Duration) func(*CCRFollowStatsRequest) { + return func(r *CCRFollowStatsRequest) { + r.Timeout = v + } +} + // WithPretty makes the response body pretty-printed. 
func (f CCRFollowStats) WithPretty() func(*CCRFollowStatsRequest) { return func(r *CCRFollowStatsRequest) { diff --git a/esapi/api.xpack.ccr.forget_follower.go b/esapi/api.xpack.ccr.forget_follower.go index 14eb812443..aabbb26d85 100644 --- a/esapi/api.xpack.ccr.forget_follower.go +++ b/esapi/api.xpack.ccr.forget_follower.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "io" "net/http" "strings" + "time" ) func newCCRForgetFollowerFunc(t Transport) CCRForgetFollower { @@ -54,6 +55,8 @@ type CCRForgetFollowerRequest struct { Body io.Reader + Timeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -99,6 +102,10 @@ func (r CCRForgetFollowerRequest) Do(providedCtx context.Context, transport Tran params = make(map[string]string) + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -184,6 +191,13 @@ func (f CCRForgetFollower) WithContext(v context.Context) func(*CCRForgetFollowe } } +// WithTimeout - explicit operation timeout. +func (f CCRForgetFollower) WithTimeout(v time.Duration) func(*CCRForgetFollowerRequest) { + return func(r *CCRForgetFollowerRequest) { + r.Timeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f CCRForgetFollower) WithPretty() func(*CCRForgetFollowerRequest) { return func(r *CCRForgetFollowerRequest) { diff --git a/esapi/api.xpack.ccr.get_auto_follow_pattern.go b/esapi/api.xpack.ccr.get_auto_follow_pattern.go index 1e64c562a1..585cea18d0 100644 --- a/esapi/api.xpack.ccr.get_auto_follow_pattern.go +++ b/esapi/api.xpack.ccr.get_auto_follow_pattern.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newCCRGetAutoFollowPatternFunc(t Transport) CCRGetAutoFollowPattern { @@ -51,6 +52,8 @@ type CCRGetAutoFollowPattern func(o ...func(*CCRGetAutoFollowPatternRequest)) (* type CCRGetAutoFollowPatternRequest struct { Name string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -98,6 +101,10 @@ func (r CCRGetAutoFollowPatternRequest) Do(providedCtx context.Context, transpor params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -183,6 +190,13 @@ func (f CCRGetAutoFollowPattern) WithName(v string) func(*CCRGetAutoFollowPatter } } +// WithMasterTimeout - explicit operation timeout for connection to master node. +func (f CCRGetAutoFollowPattern) WithMasterTimeout(v time.Duration) func(*CCRGetAutoFollowPatternRequest) { + return func(r *CCRGetAutoFollowPatternRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. 
func (f CCRGetAutoFollowPattern) WithPretty() func(*CCRGetAutoFollowPatternRequest) { return func(r *CCRGetAutoFollowPatternRequest) { diff --git a/esapi/api.xpack.ccr.pause_auto_follow_pattern.go b/esapi/api.xpack.ccr.pause_auto_follow_pattern.go index ec926df6d1..a336b60fd9 100644 --- a/esapi/api.xpack.ccr.pause_auto_follow_pattern.go +++ b/esapi/api.xpack.ccr.pause_auto_follow_pattern.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newCCRPauseAutoFollowPatternFunc(t Transport) CCRPauseAutoFollowPattern { @@ -51,6 +52,8 @@ type CCRPauseAutoFollowPattern func(name string, o ...func(*CCRPauseAutoFollowPa type CCRPauseAutoFollowPatternRequest struct { Name string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -98,6 +101,10 @@ func (r CCRPauseAutoFollowPatternRequest) Do(providedCtx context.Context, transp params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -176,6 +183,13 @@ func (f CCRPauseAutoFollowPattern) WithContext(v context.Context) func(*CCRPause } } +// WithMasterTimeout - explicit operation timeout for connection to master node. +func (f CCRPauseAutoFollowPattern) WithMasterTimeout(v time.Duration) func(*CCRPauseAutoFollowPatternRequest) { + return func(r *CCRPauseAutoFollowPatternRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f CCRPauseAutoFollowPattern) WithPretty() func(*CCRPauseAutoFollowPatternRequest) { return func(r *CCRPauseAutoFollowPatternRequest) { diff --git a/esapi/api.xpack.ccr.pause_follow.go b/esapi/api.xpack.ccr.pause_follow.go index c9238deb9b..c1a09751be 100644 --- a/esapi/api.xpack.ccr.pause_follow.go +++ b/esapi/api.xpack.ccr.pause_follow.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newCCRPauseFollowFunc(t Transport) CCRPauseFollow { @@ -51,6 +52,8 @@ type CCRPauseFollow func(index string, o ...func(*CCRPauseFollowRequest)) (*Resp type CCRPauseFollowRequest struct { Index string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -96,6 +99,10 @@ func (r CCRPauseFollowRequest) Do(providedCtx context.Context, transport Transpo params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -174,6 +181,13 @@ func (f CCRPauseFollow) WithContext(v context.Context) func(*CCRPauseFollowReque } } +// WithMasterTimeout - explicit operation timeout for connection to master node. +func (f CCRPauseFollow) WithMasterTimeout(v time.Duration) func(*CCRPauseFollowRequest) { + return func(r *CCRPauseFollowRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. 
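The cross-cluster replication endpoints pick up the same master_timeout plumbing. A sketch of pausing a follower index with the new option; the follower index name is hypothetical, and the CCR APIs are assumed to be reachable through the client's CCR group as in earlier client versions:

package main

import (
	"log"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// Pause replication for a follower index, capping how long the request
	// may wait for the master node via the newly added option.
	res, err := es.CCR.PauseFollow(
		"my-follower-index", // hypothetical follower index
		es.CCR.PauseFollow.WithMasterTimeout(30 * time.Second),
	)
	if err != nil {
		log.Fatalf("error pausing follower: %s", err)
	}
	defer res.Body.Close()

	log.Println(res.Status())
}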
func (f CCRPauseFollow) WithPretty() func(*CCRPauseFollowRequest) { return func(r *CCRPauseFollowRequest) { diff --git a/esapi/api.xpack.ccr.put_auto_follow_pattern.go b/esapi/api.xpack.ccr.put_auto_follow_pattern.go index 163d642162..93f71ad15c 100644 --- a/esapi/api.xpack.ccr.put_auto_follow_pattern.go +++ b/esapi/api.xpack.ccr.put_auto_follow_pattern.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "io" "net/http" "strings" + "time" ) func newCCRPutAutoFollowPatternFunc(t Transport) CCRPutAutoFollowPattern { @@ -54,6 +55,8 @@ type CCRPutAutoFollowPatternRequest struct { Name string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -99,6 +102,10 @@ func (r CCRPutAutoFollowPatternRequest) Do(providedCtx context.Context, transpor params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -184,6 +191,13 @@ func (f CCRPutAutoFollowPattern) WithContext(v context.Context) func(*CCRPutAuto } } +// WithMasterTimeout - explicit operation timeout for connection to master node. +func (f CCRPutAutoFollowPattern) WithMasterTimeout(v time.Duration) func(*CCRPutAutoFollowPatternRequest) { + return func(r *CCRPutAutoFollowPatternRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f CCRPutAutoFollowPattern) WithPretty() func(*CCRPutAutoFollowPatternRequest) { return func(r *CCRPutAutoFollowPatternRequest) { diff --git a/esapi/api.xpack.ccr.resume_auto_follow_pattern.go b/esapi/api.xpack.ccr.resume_auto_follow_pattern.go index a921f57431..c7dcb8bbdf 100644 --- a/esapi/api.xpack.ccr.resume_auto_follow_pattern.go +++ b/esapi/api.xpack.ccr.resume_auto_follow_pattern.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newCCRResumeAutoFollowPatternFunc(t Transport) CCRResumeAutoFollowPattern { @@ -51,6 +52,8 @@ type CCRResumeAutoFollowPattern func(name string, o ...func(*CCRResumeAutoFollow type CCRResumeAutoFollowPatternRequest struct { Name string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -98,6 +101,10 @@ func (r CCRResumeAutoFollowPatternRequest) Do(providedCtx context.Context, trans params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -176,6 +183,13 @@ func (f CCRResumeAutoFollowPattern) WithContext(v context.Context) func(*CCRResu } } +// WithMasterTimeout - explicit operation timeout for connection to master node. +func (f CCRResumeAutoFollowPattern) WithMasterTimeout(v time.Duration) func(*CCRResumeAutoFollowPatternRequest) { + return func(r *CCRResumeAutoFollowPatternRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. 
func (f CCRResumeAutoFollowPattern) WithPretty() func(*CCRResumeAutoFollowPatternRequest) { return func(r *CCRResumeAutoFollowPatternRequest) { diff --git a/esapi/api.xpack.ccr.resume_follow.go b/esapi/api.xpack.ccr.resume_follow.go index f80c62d8da..43de1ea569 100644 --- a/esapi/api.xpack.ccr.resume_follow.go +++ b/esapi/api.xpack.ccr.resume_follow.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "io" "net/http" "strings" + "time" ) func newCCRResumeFollowFunc(t Transport) CCRResumeFollow { @@ -54,6 +55,8 @@ type CCRResumeFollowRequest struct { Body io.Reader + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -99,6 +102,10 @@ func (r CCRResumeFollowRequest) Do(providedCtx context.Context, transport Transp params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -191,6 +198,13 @@ func (f CCRResumeFollow) WithBody(v io.Reader) func(*CCRResumeFollowRequest) { } } +// WithMasterTimeout - explicit operation timeout for connection to master node. +func (f CCRResumeFollow) WithMasterTimeout(v time.Duration) func(*CCRResumeFollowRequest) { + return func(r *CCRResumeFollowRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f CCRResumeFollow) WithPretty() func(*CCRResumeFollowRequest) { return func(r *CCRResumeFollowRequest) { diff --git a/esapi/api.xpack.ccr.stats.go b/esapi/api.xpack.ccr.stats.go index 4d3746fb28..8090effdea 100644 --- a/esapi/api.xpack.ccr.stats.go +++ b/esapi/api.xpack.ccr.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newCCRStatsFunc(t Transport) CCRStats { @@ -49,6 +50,9 @@ type CCRStats func(o ...func(*CCRStatsRequest)) (*Response, error) // CCRStatsRequest configures the CCR Stats API request. type CCRStatsRequest struct { + MasterTimeout time.Duration + Timeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -86,6 +90,14 @@ func (r CCRStatsRequest) Do(providedCtx context.Context, transport Transport) (* params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -164,6 +176,20 @@ func (f CCRStats) WithContext(v context.Context) func(*CCRStatsRequest) { } } +// WithMasterTimeout - explicit operation timeout for connection to master node. +func (f CCRStats) WithMasterTimeout(v time.Duration) func(*CCRStatsRequest) { + return func(r *CCRStatsRequest) { + r.MasterTimeout = v + } +} + +// WithTimeout - explicit operation timeout. +func (f CCRStats) WithTimeout(v time.Duration) func(*CCRStatsRequest) { + return func(r *CCRStatsRequest) { + r.Timeout = v + } +} + // WithPretty makes the response body pretty-printed. 
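CCR stats now accepts both an explicit master_timeout and an overall timeout. A sketch combining them, with illustrative durations and the same assumed CCR group accessor:

package main

import (
	"log"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// Fetch CCR statistics with both timeout parameters introduced for this
	// endpoint in this change.
	res, err := es.CCR.Stats(
		es.CCR.Stats.WithMasterTimeout(30 * time.Second),
		es.CCR.Stats.WithTimeout(60 * time.Second),
	)
	if err != nil {
		log.Fatalf("error fetching CCR stats: %s", err)
	}
	defer res.Body.Close()

	log.Println(res.Status())
}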
func (f CCRStats) WithPretty() func(*CCRStatsRequest) { return func(r *CCRStatsRequest) { diff --git a/esapi/api.xpack.ccr.unfollow.go b/esapi/api.xpack.ccr.unfollow.go index 174ff55b54..fd9d53e267 100644 --- a/esapi/api.xpack.ccr.unfollow.go +++ b/esapi/api.xpack.ccr.unfollow.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newCCRUnfollowFunc(t Transport) CCRUnfollow { @@ -51,6 +52,8 @@ type CCRUnfollow func(index string, o ...func(*CCRUnfollowRequest)) (*Response, type CCRUnfollowRequest struct { Index string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -96,6 +99,10 @@ func (r CCRUnfollowRequest) Do(providedCtx context.Context, transport Transport) params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -174,6 +181,13 @@ func (f CCRUnfollow) WithContext(v context.Context) func(*CCRUnfollowRequest) { } } +// WithMasterTimeout - explicit operation timeout for connection to master node. +func (f CCRUnfollow) WithMasterTimeout(v time.Duration) func(*CCRUnfollowRequest) { + return func(r *CCRUnfollowRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f CCRUnfollow) WithPretty() func(*CCRUnfollowRequest) { return func(r *CCRUnfollowRequest) { diff --git a/esapi/api.xpack.close_point_in_time.go b/esapi/api.xpack.close_point_in_time.go index d875fca385..9d583a24a4 100644 --- a/esapi/api.xpack.close_point_in_time.go +++ b/esapi/api.xpack.close_point_in_time.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.enrich.delete_policy.go b/esapi/api.xpack.enrich.delete_policy.go index ab842a6755..75c114e1c6 100644 --- a/esapi/api.xpack.enrich.delete_policy.go +++ b/esapi/api.xpack.enrich.delete_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newEnrichDeletePolicyFunc(t Transport) EnrichDeletePolicy { @@ -51,6 +52,8 @@ type EnrichDeletePolicy func(name string, o ...func(*EnrichDeletePolicyRequest)) type EnrichDeletePolicyRequest struct { Name string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -96,6 +99,10 @@ func (r EnrichDeletePolicyRequest) Do(providedCtx context.Context, transport Tra params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -174,6 +181,13 @@ func (f EnrichDeletePolicy) WithContext(v context.Context) func(*EnrichDeletePol } } +// WithMasterTimeout - timeout for processing on master node. 
+func (f EnrichDeletePolicy) WithMasterTimeout(v time.Duration) func(*EnrichDeletePolicyRequest) { + return func(r *EnrichDeletePolicyRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f EnrichDeletePolicy) WithPretty() func(*EnrichDeletePolicyRequest) { return func(r *EnrichDeletePolicyRequest) { diff --git a/esapi/api.xpack.enrich.execute_policy.go b/esapi/api.xpack.enrich.execute_policy.go index 49bce41da1..c87bf71daf 100644 --- a/esapi/api.xpack.enrich.execute_policy.go +++ b/esapi/api.xpack.enrich.execute_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "net/http" "strconv" "strings" + "time" ) func newEnrichExecutePolicyFunc(t Transport) EnrichExecutePolicy { @@ -52,6 +53,7 @@ type EnrichExecutePolicy func(name string, o ...func(*EnrichExecutePolicyRequest type EnrichExecutePolicyRequest struct { Name string + MasterTimeout time.Duration WaitForCompletion *bool Pretty bool @@ -101,6 +103,10 @@ func (r EnrichExecutePolicyRequest) Do(providedCtx context.Context, transport Tr params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.WaitForCompletion != nil { params["wait_for_completion"] = strconv.FormatBool(*r.WaitForCompletion) } @@ -183,6 +189,13 @@ func (f EnrichExecutePolicy) WithContext(v context.Context) func(*EnrichExecuteP } } +// WithMasterTimeout - timeout for processing on master node. +func (f EnrichExecutePolicy) WithMasterTimeout(v time.Duration) func(*EnrichExecutePolicyRequest) { + return func(r *EnrichExecutePolicyRequest) { + r.MasterTimeout = v + } +} + // WithWaitForCompletion - should the request should block until the execution is complete.. func (f EnrichExecutePolicy) WithWaitForCompletion(v bool) func(*EnrichExecutePolicyRequest) { return func(r *EnrichExecutePolicyRequest) { diff --git a/esapi/api.xpack.enrich.get_policy.go b/esapi/api.xpack.enrich.get_policy.go index 1401c2f25e..335931c0a4 100644 --- a/esapi/api.xpack.enrich.get_policy.go +++ b/esapi/api.xpack.enrich.get_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newEnrichGetPolicyFunc(t Transport) EnrichGetPolicy { @@ -51,6 +52,8 @@ type EnrichGetPolicy func(o ...func(*EnrichGetPolicyRequest)) (*Response, error) type EnrichGetPolicyRequest struct { Name []string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -98,6 +101,10 @@ func (r EnrichGetPolicyRequest) Do(providedCtx context.Context, transport Transp params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -183,6 +190,13 @@ func (f EnrichGetPolicy) WithName(v ...string) func(*EnrichGetPolicyRequest) { } } +// WithMasterTimeout - timeout for processing on master node. 
+func (f EnrichGetPolicy) WithMasterTimeout(v time.Duration) func(*EnrichGetPolicyRequest) { + return func(r *EnrichGetPolicyRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f EnrichGetPolicy) WithPretty() func(*EnrichGetPolicyRequest) { return func(r *EnrichGetPolicyRequest) { diff --git a/esapi/api.xpack.enrich.put_policy.go b/esapi/api.xpack.enrich.put_policy.go index 8b1b8b9b3e..5466ca37d1 100644 --- a/esapi/api.xpack.enrich.put_policy.go +++ b/esapi/api.xpack.enrich.put_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "io" "net/http" "strings" + "time" ) func newEnrichPutPolicyFunc(t Transport) EnrichPutPolicy { @@ -54,6 +55,8 @@ type EnrichPutPolicyRequest struct { Name string + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -99,6 +102,10 @@ func (r EnrichPutPolicyRequest) Do(providedCtx context.Context, transport Transp params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -184,6 +191,13 @@ func (f EnrichPutPolicy) WithContext(v context.Context) func(*EnrichPutPolicyReq } } +// WithMasterTimeout - timeout for processing on master node. +func (f EnrichPutPolicy) WithMasterTimeout(v time.Duration) func(*EnrichPutPolicyRequest) { + return func(r *EnrichPutPolicyRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f EnrichPutPolicy) WithPretty() func(*EnrichPutPolicyRequest) { return func(r *EnrichPutPolicyRequest) { diff --git a/esapi/api.xpack.enrich.stats.go b/esapi/api.xpack.enrich.stats.go index 6283c9afb4..72ebfbe157 100644 --- a/esapi/api.xpack.enrich.stats.go +++ b/esapi/api.xpack.enrich.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newEnrichStatsFunc(t Transport) EnrichStats { @@ -49,6 +50,8 @@ type EnrichStats func(o ...func(*EnrichStatsRequest)) (*Response, error) // EnrichStatsRequest configures the Enrich Stats API request. type EnrichStatsRequest struct { + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -86,6 +89,10 @@ func (r EnrichStatsRequest) Do(providedCtx context.Context, transport Transport) params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -164,6 +171,13 @@ func (f EnrichStats) WithContext(v context.Context) func(*EnrichStatsRequest) { } } +// WithMasterTimeout - timeout for processing on master node. +func (f EnrichStats) WithMasterTimeout(v time.Duration) func(*EnrichStatsRequest) { + return func(r *EnrichStatsRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. 
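The enrich APIs gain the same master_timeout parameter. Since I am not certain how this client groups the enrich accessors, the sketch below sticks to the generated request struct, whose fields are visible in the hunks above; the policy name is hypothetical:

package main

import (
	"context"
	"log"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// Kick off execution of an enrich policy without blocking, and bound the
	// wait for the master node with the newly added master_timeout field.
	waitForCompletion := false
	req := esapi.EnrichExecutePolicyRequest{
		Name:              "my-enrich-policy",
		MasterTimeout:     30 * time.Second,
		WaitForCompletion: &waitForCompletion,
	}

	res, err := req.Do(context.Background(), es)
	if err != nil {
		log.Fatalf("error executing enrich policy: %s", err)
	}
	defer res.Body.Close()

	log.Println(res.Status())
}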
func (f EnrichStats) WithPretty() func(*EnrichStatsRequest) { return func(r *EnrichStatsRequest) { diff --git a/esapi/api.xpack.eql.delete.go b/esapi/api.xpack.eql.delete.go index 5964d6d21f..54f029be53 100644 --- a/esapi/api.xpack.eql.delete.go +++ b/esapi/api.xpack.eql.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.eql.get.go b/esapi/api.xpack.eql.get.go index b6e2dcd954..5d3450382e 100644 --- a/esapi/api.xpack.eql.get.go +++ b/esapi/api.xpack.eql.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.eql.get_status.go b/esapi/api.xpack.eql.get_status.go index 94da2eb425..dbb2fa63fc 100644 --- a/esapi/api.xpack.eql.get_status.go +++ b/esapi/api.xpack.eql.get_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.eql.search.go b/esapi/api.xpack.eql.search.go index 3c2fcc2e7a..0b7a6ea594 100644 --- a/esapi/api.xpack.eql.search.go +++ b/esapi/api.xpack.eql.search.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.esql.async_query.go b/esapi/api.xpack.esql.async_query.go index 1229bb8905..dd46a7a628 100644 --- a/esapi/api.xpack.esql.async_query.go +++ b/esapi/api.xpack.esql.async_query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -46,8 +46,6 @@ func newEsqlAsyncQueryFunc(t Transport) EsqlAsyncQuery { // EsqlAsyncQuery - Executes an ESQL request asynchronously // -// This API is experimental. -// // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/master/esql-async-query-api.html. type EsqlAsyncQuery func(body io.Reader, o ...func(*EsqlAsyncQueryRequest)) (*Response, error) diff --git a/esapi/api.xpack.esql.async_query_get.go b/esapi/api.xpack.esql.async_query_get.go index e665359cb4..a389c2275d 100644 --- a/esapi/api.xpack.esql.async_query_get.go +++ b/esapi/api.xpack.esql.async_query_get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.esql.query.go b/esapi/api.xpack.esql.query.go index cb256e2679..56a4945285 100644 --- a/esapi/api.xpack.esql.query.go +++ b/esapi/api.xpack.esql.query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -46,8 +46,6 @@ func newEsqlQueryFunc(t Transport) EsqlQuery { // EsqlQuery - Executes an ESQL request // -// This API is experimental. -// // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-query-api.html. type EsqlQuery func(body io.Reader, o ...func(*EsqlQueryRequest)) (*Response, error) diff --git a/esapi/api.xpack.graph.explore.go b/esapi/api.xpack.graph.explore.go index 5596006ec1..69d614d6e9 100644 --- a/esapi/api.xpack.graph.explore.go +++ b/esapi/api.xpack.graph.explore.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.delete_lifecycle.go b/esapi/api.xpack.ilm.delete_lifecycle.go index 85a096dfdc..a059173c59 100644 --- a/esapi/api.xpack.ilm.delete_lifecycle.go +++ b/esapi/api.xpack.ilm.delete_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.explain_lifecycle.go b/esapi/api.xpack.ilm.explain_lifecycle.go index 057139de7f..f1becd01c7 100644 --- a/esapi/api.xpack.ilm.explain_lifecycle.go +++ b/esapi/api.xpack.ilm.explain_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.get_lifecycle.go b/esapi/api.xpack.ilm.get_lifecycle.go index 7d418e6e42..a8d5cfba4c 100644 --- a/esapi/api.xpack.ilm.get_lifecycle.go +++ b/esapi/api.xpack.ilm.get_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.get_status.go b/esapi/api.xpack.ilm.get_status.go index 05fb3acf39..cb6ce76038 100644 --- a/esapi/api.xpack.ilm.get_status.go +++ b/esapi/api.xpack.ilm.get_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.migrate_to_data_tiers.go b/esapi/api.xpack.ilm.migrate_to_data_tiers.go index a6c8cb684a..d0c7d4cf24 100644 --- a/esapi/api.xpack.ilm.migrate_to_data_tiers.go +++ b/esapi/api.xpack.ilm.migrate_to_data_tiers.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
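The hunks above also drop the "This API is experimental" notice from both ES|QL endpoints. A sketch of a plain synchronous ES|QL query using the generated request struct; the index name and query text are made up, and the JSON body shape follows the ES|QL query API:

package main

import (
	"context"
	"log"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// Run an ES|QL query; the body carries the query string as JSON.
	req := esapi.EsqlQueryRequest{
		Body: strings.NewReader(`{"query": "FROM my-index | LIMIT 10"}`),
	}

	res, err := req.Do(context.Background(), es)
	if err != nil {
		log.Fatalf("error running ES|QL query: %s", err)
	}
	defer res.Body.Close()

	log.Println(res.Status())
}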
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.move_to_step.go b/esapi/api.xpack.ilm.move_to_step.go index 00396f352c..0fe22484e7 100644 --- a/esapi/api.xpack.ilm.move_to_step.go +++ b/esapi/api.xpack.ilm.move_to_step.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.put_lifecycle.go b/esapi/api.xpack.ilm.put_lifecycle.go index 4ebe7d3a28..394bb45070 100644 --- a/esapi/api.xpack.ilm.put_lifecycle.go +++ b/esapi/api.xpack.ilm.put_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.remove_policy.go b/esapi/api.xpack.ilm.remove_policy.go index a684146824..f3b39d9b8e 100644 --- a/esapi/api.xpack.ilm.remove_policy.go +++ b/esapi/api.xpack.ilm.remove_policy.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.retry.go b/esapi/api.xpack.ilm.retry.go index bb8db09f78..b86c732ba8 100644 --- a/esapi/api.xpack.ilm.retry.go +++ b/esapi/api.xpack.ilm.retry.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.start.go b/esapi/api.xpack.ilm.start.go index 7972e2d811..34c3b0e72c 100644 --- a/esapi/api.xpack.ilm.start.go +++ b/esapi/api.xpack.ilm.start.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ilm.stop.go b/esapi/api.xpack.ilm.stop.go index 83cf478b94..c5ac80ecd5 100644 --- a/esapi/api.xpack.ilm.stop.go +++ b/esapi/api.xpack.ilm.stop.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.create_data_stream.go b/esapi/api.xpack.indices.create_data_stream.go index b45cb5536d..6ee7ea7dba 100644 --- a/esapi/api.xpack.indices.create_data_stream.go +++ b/esapi/api.xpack.indices.create_data_stream.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.data_streams_stats.go b/esapi/api.xpack.indices.data_streams_stats.go index 2bc2b029e2..92d1042208 100644 --- a/esapi/api.xpack.indices.data_streams_stats.go +++ b/esapi/api.xpack.indices.data_streams_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.delete_data_stream.go b/esapi/api.xpack.indices.delete_data_stream.go index e6f88cce4a..8a4a5b8567 100644 --- a/esapi/api.xpack.indices.delete_data_stream.go +++ b/esapi/api.xpack.indices.delete_data_stream.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.get_data_stream.go b/esapi/api.xpack.indices.get_data_stream.go index 8a2b70f5c3..18f32ffb28 100644 --- a/esapi/api.xpack.indices.get_data_stream.go +++ b/esapi/api.xpack.indices.get_data_stream.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.migrate_to_data_stream.go b/esapi/api.xpack.indices.migrate_to_data_stream.go index 306df6f90d..87fda02d7d 100644 --- a/esapi/api.xpack.indices.migrate_to_data_stream.go +++ b/esapi/api.xpack.indices.migrate_to_data_stream.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.promote_data_stream.go b/esapi/api.xpack.indices.promote_data_stream.go index fc51510728..2c727c3ebb 100644 --- a/esapi/api.xpack.indices.promote_data_stream.go +++ b/esapi/api.xpack.indices.promote_data_stream.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.reload_search_analyzers.go b/esapi/api.xpack.indices.reload_search_analyzers.go index d471cdd9a3..1d8da80a84 100644 --- a/esapi/api.xpack.indices.reload_search_analyzers.go +++ b/esapi/api.xpack.indices.reload_search_analyzers.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.indices.unfreeze.go b/esapi/api.xpack.indices.unfreeze.go index e50d6d0e19..c9ae2ae114 100644 --- a/esapi/api.xpack.indices.unfreeze.go +++ b/esapi/api.xpack.indices.unfreeze.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.license.delete.go b/esapi/api.xpack.license.delete.go index abbb52902c..93d83e61b3 100644 --- a/esapi/api.xpack.license.delete.go +++ b/esapi/api.xpack.license.delete.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newLicenseDeleteFunc(t Transport) LicenseDelete { @@ -49,6 +50,9 @@ type LicenseDelete func(o ...func(*LicenseDeleteRequest)) (*Response, error) // LicenseDeleteRequest configures the License Delete API request. type LicenseDeleteRequest struct { + MasterTimeout time.Duration + Timeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -86,6 +90,14 @@ func (r LicenseDeleteRequest) Do(providedCtx context.Context, transport Transpor params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -164,6 +176,20 @@ func (f LicenseDelete) WithContext(v context.Context) func(*LicenseDeleteRequest } } +// WithMasterTimeout - timeout for processing on master node. +func (f LicenseDelete) WithMasterTimeout(v time.Duration) func(*LicenseDeleteRequest) { + return func(r *LicenseDeleteRequest) { + r.MasterTimeout = v + } +} + +// WithTimeout - timeout for acknowledgement of update from all nodes in cluster. +func (f LicenseDelete) WithTimeout(v time.Duration) func(*LicenseDeleteRequest) { + return func(r *LicenseDeleteRequest) { + r.Timeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f LicenseDelete) WithPretty() func(*LicenseDeleteRequest) { return func(r *LicenseDeleteRequest) { diff --git a/esapi/api.xpack.license.get.go b/esapi/api.xpack.license.get.go index b13c842000..3f85fcaa8e 100644 --- a/esapi/api.xpack.license.get.go +++ b/esapi/api.xpack.license.get.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.license.get_basic_status.go b/esapi/api.xpack.license.get_basic_status.go index f51563a73a..6b61df22db 100644 --- a/esapi/api.xpack.license.get_basic_status.go +++ b/esapi/api.xpack.license.get_basic_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.license.get_trial_status.go b/esapi/api.xpack.license.get_trial_status.go index 328cf1bf88..53fbccca95 100644 --- a/esapi/api.xpack.license.get_trial_status.go +++ b/esapi/api.xpack.license.get_trial_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
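// Usage sketch (editorial annotation, not part of the generated diff): License Delete
// gains the master_timeout and timeout request parameters added in the hunk above,
// exposed as WithMasterTimeout and WithTimeout options. The duration values are
// illustrative, and the es.License.Delete accessor is assumed to follow the client's
// usual namespacing.
package main

import (
	"log"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	res, err := es.License.Delete(
		es.License.Delete.WithMasterTimeout(30*time.Second), // sent as the master_timeout query parameter
		es.License.Delete.WithTimeout(30*time.Second),       // sent as the timeout query parameter
	)
	if err != nil {
		log.Fatalf("error performing the request: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.Status())
}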
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.license.post.go b/esapi/api.xpack.license.post.go index e30f2b2ebf..157dafe6ca 100644 --- a/esapi/api.xpack.license.post.go +++ b/esapi/api.xpack.license.post.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -25,6 +25,7 @@ import ( "net/http" "strconv" "strings" + "time" ) func newLicensePostFunc(t Transport) LicensePost { @@ -53,7 +54,9 @@ type LicensePost func(o ...func(*LicensePostRequest)) (*Response, error) type LicensePostRequest struct { Body io.Reader - Acknowledge *bool + Acknowledge *bool + MasterTimeout time.Duration + Timeout time.Duration Pretty bool Human bool @@ -96,6 +99,14 @@ func (r LicensePostRequest) Do(providedCtx context.Context, transport Transport) params["acknowledge"] = strconv.FormatBool(*r.Acknowledge) } + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -195,6 +206,20 @@ func (f LicensePost) WithAcknowledge(v bool) func(*LicensePostRequest) { } } +// WithMasterTimeout - timeout for processing on master node. +func (f LicensePost) WithMasterTimeout(v time.Duration) func(*LicensePostRequest) { + return func(r *LicensePostRequest) { + r.MasterTimeout = v + } +} + +// WithTimeout - timeout for acknowledgement of update from all nodes in cluster. +func (f LicensePost) WithTimeout(v time.Duration) func(*LicensePostRequest) { + return func(r *LicensePostRequest) { + r.Timeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f LicensePost) WithPretty() func(*LicensePostRequest) { return func(r *LicensePostRequest) { diff --git a/esapi/api.xpack.license.post_start_basic.go b/esapi/api.xpack.license.post_start_basic.go index dc2aff5a71..d6f6c2158f 100644 --- a/esapi/api.xpack.license.post_start_basic.go +++ b/esapi/api.xpack.license.post_start_basic.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "net/http" "strconv" "strings" + "time" ) func newLicensePostStartBasicFunc(t Transport) LicensePostStartBasic { @@ -50,7 +51,9 @@ type LicensePostStartBasic func(o ...func(*LicensePostStartBasicRequest)) (*Resp // LicensePostStartBasicRequest configures the License Post Start Basic API request. type LicensePostStartBasicRequest struct { - Acknowledge *bool + Acknowledge *bool + MasterTimeout time.Duration + Timeout time.Duration Pretty bool Human bool @@ -93,6 +96,14 @@ func (r LicensePostStartBasicRequest) Do(providedCtx context.Context, transport params["acknowledge"] = strconv.FormatBool(*r.Acknowledge) } + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -178,6 +189,20 @@ func (f LicensePostStartBasic) WithAcknowledge(v bool) func(*LicensePostStartBas } } +// WithMasterTimeout - timeout for processing on master node. 
+func (f LicensePostStartBasic) WithMasterTimeout(v time.Duration) func(*LicensePostStartBasicRequest) { + return func(r *LicensePostStartBasicRequest) { + r.MasterTimeout = v + } +} + +// WithTimeout - timeout for acknowledgement of update from all nodes in cluster. +func (f LicensePostStartBasic) WithTimeout(v time.Duration) func(*LicensePostStartBasicRequest) { + return func(r *LicensePostStartBasicRequest) { + r.Timeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f LicensePostStartBasic) WithPretty() func(*LicensePostStartBasicRequest) { return func(r *LicensePostStartBasicRequest) { diff --git a/esapi/api.xpack.license.post_start_trial.go b/esapi/api.xpack.license.post_start_trial.go index 6bd1eb97fb..1c2967d80b 100644 --- a/esapi/api.xpack.license.post_start_trial.go +++ b/esapi/api.xpack.license.post_start_trial.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "net/http" "strconv" "strings" + "time" ) func newLicensePostStartTrialFunc(t Transport) LicensePostStartTrial { @@ -50,8 +51,10 @@ type LicensePostStartTrial func(o ...func(*LicensePostStartTrialRequest)) (*Resp // LicensePostStartTrialRequest configures the License Post Start Trial API request. type LicensePostStartTrialRequest struct { - Acknowledge *bool - DocumentType string + Acknowledge *bool + MasterTimeout time.Duration + Timeout time.Duration + DocumentType string Pretty bool Human bool @@ -94,6 +97,14 @@ func (r LicensePostStartTrialRequest) Do(providedCtx context.Context, transport params["acknowledge"] = strconv.FormatBool(*r.Acknowledge) } + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.DocumentType != "" { params["type"] = r.DocumentType } @@ -183,6 +194,20 @@ func (f LicensePostStartTrial) WithAcknowledge(v bool) func(*LicensePostStartTri } } +// WithMasterTimeout - timeout for processing on master node. +func (f LicensePostStartTrial) WithMasterTimeout(v time.Duration) func(*LicensePostStartTrialRequest) { + return func(r *LicensePostStartTrialRequest) { + r.MasterTimeout = v + } +} + +// WithTimeout - timeout for acknowledgement of update from all nodes in cluster. +func (f LicensePostStartTrial) WithTimeout(v time.Duration) func(*LicensePostStartTrialRequest) { + return func(r *LicensePostStartTrialRequest) { + r.Timeout = v + } +} + // WithDocumentType - the type of trial license to generate (default: "trial"). func (f LicensePostStartTrial) WithDocumentType(v string) func(*LicensePostStartTrialRequest) { return func(r *LicensePostStartTrialRequest) { diff --git a/esapi/api.xpack.logstash.delete_pipeline.go b/esapi/api.xpack.logstash.delete_pipeline.go index c7eb3e4849..35e1372e2e 100644 --- a/esapi/api.xpack.logstash.delete_pipeline.go +++ b/esapi/api.xpack.logstash.delete_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
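// Usage sketch (editorial annotation, not part of the generated diff): the start-basic
// and start-trial calls gain the same master_timeout/timeout options added above,
// alongside the existing acknowledge flag. Values are illustrative, and the
// es.License.PostStartTrial accessor is assumed to follow the client's usual
// namespacing.
package main

import (
	"log"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	res, err := es.License.PostStartTrial(
		es.License.PostStartTrial.WithAcknowledge(true),
		es.License.PostStartTrial.WithMasterTimeout(30*time.Second),
		es.License.PostStartTrial.WithTimeout(30*time.Second),
	)
	if err != nil {
		log.Fatalf("error performing the request: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.Status())
}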
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.logstash.get_pipeline.go b/esapi/api.xpack.logstash.get_pipeline.go index eba3e555b9..a576c9e431 100644 --- a/esapi/api.xpack.logstash.get_pipeline.go +++ b/esapi/api.xpack.logstash.get_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.logstash.put_pipeline.go b/esapi/api.xpack.logstash.put_pipeline.go index 849cfb6490..6432ca21ee 100644 --- a/esapi/api.xpack.logstash.put_pipeline.go +++ b/esapi/api.xpack.logstash.put_pipeline.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.migration.deprecations.go b/esapi/api.xpack.migration.deprecations.go index 372967f688..e789065e13 100644 --- a/esapi/api.xpack.migration.deprecations.go +++ b/esapi/api.xpack.migration.deprecations.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.migration.get_feature_upgrade_status.go b/esapi/api.xpack.migration.get_feature_upgrade_status.go index 4b76a1bcbd..4463595b55 100644 --- a/esapi/api.xpack.migration.get_feature_upgrade_status.go +++ b/esapi/api.xpack.migration.get_feature_upgrade_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.migration.post_feature_upgrade.go b/esapi/api.xpack.migration.post_feature_upgrade.go index df06c2cc8f..8c27b95be9 100644 --- a/esapi/api.xpack.migration.post_feature_upgrade.go +++ b/esapi/api.xpack.migration.post_feature_upgrade.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.clear_trained_model_deployment_cache.go b/esapi/api.xpack.ml.clear_trained_model_deployment_cache.go index c2ffde1af3..1041353587 100644 --- a/esapi/api.xpack.ml.clear_trained_model_deployment_cache.go +++ b/esapi/api.xpack.ml.clear_trained_model_deployment_cache.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.close_job.go b/esapi/api.xpack.ml.close_job.go index 65537098b1..f3b36fe166 100644 --- a/esapi/api.xpack.ml.close_job.go +++ b/esapi/api.xpack.ml.close_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_calendar.go b/esapi/api.xpack.ml.delete_calendar.go index bd090d9c9b..049818542b 100644 --- a/esapi/api.xpack.ml.delete_calendar.go +++ b/esapi/api.xpack.ml.delete_calendar.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_calendar_event.go b/esapi/api.xpack.ml.delete_calendar_event.go index 4562aab922..da193d0d46 100644 --- a/esapi/api.xpack.ml.delete_calendar_event.go +++ b/esapi/api.xpack.ml.delete_calendar_event.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_calendar_job.go b/esapi/api.xpack.ml.delete_calendar_job.go index 63a71a5c7d..a9cedb97d9 100644 --- a/esapi/api.xpack.ml.delete_calendar_job.go +++ b/esapi/api.xpack.ml.delete_calendar_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_data_frame_analytics.go b/esapi/api.xpack.ml.delete_data_frame_analytics.go index 2723219297..ef94fc4166 100644 --- a/esapi/api.xpack.ml.delete_data_frame_analytics.go +++ b/esapi/api.xpack.ml.delete_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_datafeed.go b/esapi/api.xpack.ml.delete_datafeed.go index 1f2307c4fc..1c2bac6ea8 100644 --- a/esapi/api.xpack.ml.delete_datafeed.go +++ b/esapi/api.xpack.ml.delete_datafeed.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_expired_data.go b/esapi/api.xpack.ml.delete_expired_data.go index ccd4b6dd72..bff2f28351 100644 --- a/esapi/api.xpack.ml.delete_expired_data.go +++ b/esapi/api.xpack.ml.delete_expired_data.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_filter.go b/esapi/api.xpack.ml.delete_filter.go index ddfb85c5f3..e434f77740 100644 --- a/esapi/api.xpack.ml.delete_filter.go +++ b/esapi/api.xpack.ml.delete_filter.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_forecast.go b/esapi/api.xpack.ml.delete_forecast.go index d7132ad170..80e769a209 100644 --- a/esapi/api.xpack.ml.delete_forecast.go +++ b/esapi/api.xpack.ml.delete_forecast.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_job.go b/esapi/api.xpack.ml.delete_job.go index 6632c8de52..1ba3abe721 100644 --- a/esapi/api.xpack.ml.delete_job.go +++ b/esapi/api.xpack.ml.delete_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_model_snapshot.go b/esapi/api.xpack.ml.delete_model_snapshot.go index d489e8db05..fad6a752fb 100644 --- a/esapi/api.xpack.ml.delete_model_snapshot.go +++ b/esapi/api.xpack.ml.delete_model_snapshot.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_trained_model.go b/esapi/api.xpack.ml.delete_trained_model.go index 1abe93a31b..f140ce7428 100644 --- a/esapi/api.xpack.ml.delete_trained_model.go +++ b/esapi/api.xpack.ml.delete_trained_model.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.delete_trained_model_alias.go b/esapi/api.xpack.ml.delete_trained_model_alias.go index 3229ae90a8..c5237d669c 100644 --- a/esapi/api.xpack.ml.delete_trained_model_alias.go +++ b/esapi/api.xpack.ml.delete_trained_model_alias.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.estimate_model_memory.go b/esapi/api.xpack.ml.estimate_model_memory.go index 7b76bd03ca..5595dcd136 100644 --- a/esapi/api.xpack.ml.estimate_model_memory.go +++ b/esapi/api.xpack.ml.estimate_model_memory.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.evaluate_data_frame.go b/esapi/api.xpack.ml.evaluate_data_frame.go index a08de4b6db..73ce6983cf 100644 --- a/esapi/api.xpack.ml.evaluate_data_frame.go +++ b/esapi/api.xpack.ml.evaluate_data_frame.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.explain_data_frame_analytics.go b/esapi/api.xpack.ml.explain_data_frame_analytics.go index 860eab832c..8b7c1c7525 100644 --- a/esapi/api.xpack.ml.explain_data_frame_analytics.go +++ b/esapi/api.xpack.ml.explain_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.flush_job.go b/esapi/api.xpack.ml.flush_job.go index a76d497ff7..97280d8099 100644 --- a/esapi/api.xpack.ml.flush_job.go +++ b/esapi/api.xpack.ml.flush_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.forecast.go b/esapi/api.xpack.ml.forecast.go index 737b5f0733..a4c5643b9a 100644 --- a/esapi/api.xpack.ml.forecast.go +++ b/esapi/api.xpack.ml.forecast.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_buckets.go b/esapi/api.xpack.ml.get_buckets.go index d671264466..42b16fdd78 100644 --- a/esapi/api.xpack.ml.get_buckets.go +++ b/esapi/api.xpack.ml.get_buckets.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_calendar_events.go b/esapi/api.xpack.ml.get_calendar_events.go index 87de19fd1e..e526f9d5f5 100644 --- a/esapi/api.xpack.ml.get_calendar_events.go +++ b/esapi/api.xpack.ml.get_calendar_events.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_calendars.go b/esapi/api.xpack.ml.get_calendars.go index 2207bd8c51..0e21933eda 100644 --- a/esapi/api.xpack.ml.get_calendars.go +++ b/esapi/api.xpack.ml.get_calendars.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_categories.go b/esapi/api.xpack.ml.get_categories.go index 9ac0cbb79e..ea07c0e566 100644 --- a/esapi/api.xpack.ml.get_categories.go +++ b/esapi/api.xpack.ml.get_categories.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_data_frame_analytics.go b/esapi/api.xpack.ml.get_data_frame_analytics.go index ef68e97116..0d497bd2c7 100644 --- a/esapi/api.xpack.ml.get_data_frame_analytics.go +++ b/esapi/api.xpack.ml.get_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_data_frame_analytics_stats.go b/esapi/api.xpack.ml.get_data_frame_analytics_stats.go index 5c937156bb..e621a11226 100644 --- a/esapi/api.xpack.ml.get_data_frame_analytics_stats.go +++ b/esapi/api.xpack.ml.get_data_frame_analytics_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_datafeed_stats.go b/esapi/api.xpack.ml.get_datafeed_stats.go index bee19c5c53..7eada66764 100644 --- a/esapi/api.xpack.ml.get_datafeed_stats.go +++ b/esapi/api.xpack.ml.get_datafeed_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_datafeeds.go b/esapi/api.xpack.ml.get_datafeeds.go index b16e787ee3..053546b777 100644 --- a/esapi/api.xpack.ml.get_datafeeds.go +++ b/esapi/api.xpack.ml.get_datafeeds.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_filters.go b/esapi/api.xpack.ml.get_filters.go index c673814aa3..2e654353b0 100644 --- a/esapi/api.xpack.ml.get_filters.go +++ b/esapi/api.xpack.ml.get_filters.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_influencers.go b/esapi/api.xpack.ml.get_influencers.go index 877f5b0c14..06e50f60f4 100644 --- a/esapi/api.xpack.ml.get_influencers.go +++ b/esapi/api.xpack.ml.get_influencers.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_job_stats.go b/esapi/api.xpack.ml.get_job_stats.go index c596a2bde1..fc9ab71c80 100644 --- a/esapi/api.xpack.ml.get_job_stats.go +++ b/esapi/api.xpack.ml.get_job_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_jobs.go b/esapi/api.xpack.ml.get_jobs.go index ba0a801ba8..639ff703e3 100644 --- a/esapi/api.xpack.ml.get_jobs.go +++ b/esapi/api.xpack.ml.get_jobs.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_memory_stats.go b/esapi/api.xpack.ml.get_memory_stats.go index 6646333883..d546aa2576 100644 --- a/esapi/api.xpack.ml.get_memory_stats.go +++ b/esapi/api.xpack.ml.get_memory_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_model_snapshot_upgrade_stats.go b/esapi/api.xpack.ml.get_model_snapshot_upgrade_stats.go index f71ad28324..7209b72bd1 100644 --- a/esapi/api.xpack.ml.get_model_snapshot_upgrade_stats.go +++ b/esapi/api.xpack.ml.get_model_snapshot_upgrade_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_model_snapshots.go b/esapi/api.xpack.ml.get_model_snapshots.go index b49e1d5090..677337922a 100644 --- a/esapi/api.xpack.ml.get_model_snapshots.go +++ b/esapi/api.xpack.ml.get_model_snapshots.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_overall_buckets.go b/esapi/api.xpack.ml.get_overall_buckets.go index 2c3f394b1b..3d34418fa7 100644 --- a/esapi/api.xpack.ml.get_overall_buckets.go +++ b/esapi/api.xpack.ml.get_overall_buckets.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_records.go b/esapi/api.xpack.ml.get_records.go index 57093fe798..3b028f2901 100644 --- a/esapi/api.xpack.ml.get_records.go +++ b/esapi/api.xpack.ml.get_records.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_trained_models.go b/esapi/api.xpack.ml.get_trained_models.go index a73262bfdc..9547ecd370 100644 --- a/esapi/api.xpack.ml.get_trained_models.go +++ b/esapi/api.xpack.ml.get_trained_models.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.get_trained_models_stats.go b/esapi/api.xpack.ml.get_trained_models_stats.go index d506053ce6..40800d943b 100644 --- a/esapi/api.xpack.ml.get_trained_models_stats.go +++ b/esapi/api.xpack.ml.get_trained_models_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.infer_trained_model.go b/esapi/api.xpack.ml.infer_trained_model.go index b9b94dd1bc..9846c64d91 100644 --- a/esapi/api.xpack.ml.infer_trained_model.go +++ b/esapi/api.xpack.ml.infer_trained_model.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.info.go b/esapi/api.xpack.ml.info.go index ea32e94f78..0d6970ca09 100644 --- a/esapi/api.xpack.ml.info.go +++ b/esapi/api.xpack.ml.info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.open_job.go b/esapi/api.xpack.ml.open_job.go index 35b6d75e7f..6ba4b27519 100644 --- a/esapi/api.xpack.ml.open_job.go +++ b/esapi/api.xpack.ml.open_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.post_calendar_events.go b/esapi/api.xpack.ml.post_calendar_events.go index 725e8906b1..b1681584e3 100644 --- a/esapi/api.xpack.ml.post_calendar_events.go +++ b/esapi/api.xpack.ml.post_calendar_events.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.post_data.go b/esapi/api.xpack.ml.post_data.go index 840c15dd0e..11116f82a8 100644 --- a/esapi/api.xpack.ml.post_data.go +++ b/esapi/api.xpack.ml.post_data.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.preview_data_frame_analytics.go b/esapi/api.xpack.ml.preview_data_frame_analytics.go index 475cb00feb..bb7c032f5c 100644 --- a/esapi/api.xpack.ml.preview_data_frame_analytics.go +++ b/esapi/api.xpack.ml.preview_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.preview_datafeed.go b/esapi/api.xpack.ml.preview_datafeed.go index 925f2d133e..93cc5aedd6 100644 --- a/esapi/api.xpack.ml.preview_datafeed.go +++ b/esapi/api.xpack.ml.preview_datafeed.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_calendar.go b/esapi/api.xpack.ml.put_calendar.go index 0aae0d4289..bd6105ad8d 100644 --- a/esapi/api.xpack.ml.put_calendar.go +++ b/esapi/api.xpack.ml.put_calendar.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_calendar_job.go b/esapi/api.xpack.ml.put_calendar_job.go index a396c30b03..f89de7a8a0 100644 --- a/esapi/api.xpack.ml.put_calendar_job.go +++ b/esapi/api.xpack.ml.put_calendar_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_data_frame_analytics.go b/esapi/api.xpack.ml.put_data_frame_analytics.go index 1d837e154f..a89ba16aed 100644 --- a/esapi/api.xpack.ml.put_data_frame_analytics.go +++ b/esapi/api.xpack.ml.put_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_datafeed.go b/esapi/api.xpack.ml.put_datafeed.go index 4b5a8d1e65..fd2a1baa4f 100644 --- a/esapi/api.xpack.ml.put_datafeed.go +++ b/esapi/api.xpack.ml.put_datafeed.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_filter.go b/esapi/api.xpack.ml.put_filter.go index 9209a584f9..933b7426bc 100644 --- a/esapi/api.xpack.ml.put_filter.go +++ b/esapi/api.xpack.ml.put_filter.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_job.go b/esapi/api.xpack.ml.put_job.go index 944d7d310f..6a2bf3bfd4 100644 --- a/esapi/api.xpack.ml.put_job.go +++ b/esapi/api.xpack.ml.put_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_trained_model.go b/esapi/api.xpack.ml.put_trained_model.go index 8cb4e5fdfd..189ed45397 100644 --- a/esapi/api.xpack.ml.put_trained_model.go +++ b/esapi/api.xpack.ml.put_trained_model.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_trained_model_alias.go b/esapi/api.xpack.ml.put_trained_model_alias.go index e1a1f7a00a..4152cb35dd 100644 --- a/esapi/api.xpack.ml.put_trained_model_alias.go +++ b/esapi/api.xpack.ml.put_trained_model_alias.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_trained_model_definition_part.go b/esapi/api.xpack.ml.put_trained_model_definition_part.go index 608870098d..a6fd32adcb 100644 --- a/esapi/api.xpack.ml.put_trained_model_definition_part.go +++ b/esapi/api.xpack.ml.put_trained_model_definition_part.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.put_trained_model_vocabulary.go b/esapi/api.xpack.ml.put_trained_model_vocabulary.go index aadde2c5c2..effcf41842 100644 --- a/esapi/api.xpack.ml.put_trained_model_vocabulary.go +++ b/esapi/api.xpack.ml.put_trained_model_vocabulary.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.reset_job.go b/esapi/api.xpack.ml.reset_job.go index 4458e35cf6..de57bcb3ca 100644 --- a/esapi/api.xpack.ml.reset_job.go +++ b/esapi/api.xpack.ml.reset_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.revert_model_snapshot.go b/esapi/api.xpack.ml.revert_model_snapshot.go index 343944f0ed..332f9a8e76 100644 --- a/esapi/api.xpack.ml.revert_model_snapshot.go +++ b/esapi/api.xpack.ml.revert_model_snapshot.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.set_upgrade_mode.go b/esapi/api.xpack.ml.set_upgrade_mode.go index 3cfb610e91..40928d798e 100644 --- a/esapi/api.xpack.ml.set_upgrade_mode.go +++ b/esapi/api.xpack.ml.set_upgrade_mode.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.start_data_frame_analytics.go b/esapi/api.xpack.ml.start_data_frame_analytics.go index 1e6b6eac4f..5512909a26 100644 --- a/esapi/api.xpack.ml.start_data_frame_analytics.go +++ b/esapi/api.xpack.ml.start_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.start_datafeed.go b/esapi/api.xpack.ml.start_datafeed.go index 1da322db37..f7d4bdf239 100644 --- a/esapi/api.xpack.ml.start_datafeed.go +++ b/esapi/api.xpack.ml.start_datafeed.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.start_trained_model_deployment.go b/esapi/api.xpack.ml.start_trained_model_deployment.go index d395539eba..3122db56de 100644 --- a/esapi/api.xpack.ml.start_trained_model_deployment.go +++ b/esapi/api.xpack.ml.start_trained_model_deployment.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.stop_data_frame_analytics.go b/esapi/api.xpack.ml.stop_data_frame_analytics.go index 40815ffa67..b6101cb0d9 100644 --- a/esapi/api.xpack.ml.stop_data_frame_analytics.go +++ b/esapi/api.xpack.ml.stop_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.stop_datafeed.go b/esapi/api.xpack.ml.stop_datafeed.go index 63de96e2a6..d97c85755d 100644 --- a/esapi/api.xpack.ml.stop_datafeed.go +++ b/esapi/api.xpack.ml.stop_datafeed.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.stop_trained_model_deployment.go b/esapi/api.xpack.ml.stop_trained_model_deployment.go index 1bb2278581..299383c030 100644 --- a/esapi/api.xpack.ml.stop_trained_model_deployment.go +++ b/esapi/api.xpack.ml.stop_trained_model_deployment.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.update_data_frame_analytics.go b/esapi/api.xpack.ml.update_data_frame_analytics.go index 228239352e..23ecd1f828 100644 --- a/esapi/api.xpack.ml.update_data_frame_analytics.go +++ b/esapi/api.xpack.ml.update_data_frame_analytics.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.update_datafeed.go b/esapi/api.xpack.ml.update_datafeed.go index fa847fb4d2..2f052fef78 100644 --- a/esapi/api.xpack.ml.update_datafeed.go +++ b/esapi/api.xpack.ml.update_datafeed.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.update_filter.go b/esapi/api.xpack.ml.update_filter.go index 54e5440487..b2dec0e733 100644 --- a/esapi/api.xpack.ml.update_filter.go +++ b/esapi/api.xpack.ml.update_filter.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.update_job.go b/esapi/api.xpack.ml.update_job.go index 6dad1669f0..23eaca3995 100644 --- a/esapi/api.xpack.ml.update_job.go +++ b/esapi/api.xpack.ml.update_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.update_model_snapshot.go b/esapi/api.xpack.ml.update_model_snapshot.go index f53560c0d5..d700fa1b42 100644 --- a/esapi/api.xpack.ml.update_model_snapshot.go +++ b/esapi/api.xpack.ml.update_model_snapshot.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.update_trained_model_deployment.go b/esapi/api.xpack.ml.update_trained_model_deployment.go index fc51ea4340..91998bc907 100644 --- a/esapi/api.xpack.ml.update_trained_model_deployment.go +++ b/esapi/api.xpack.ml.update_trained_model_deployment.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,12 +23,13 @@ import ( "context" "io" "net/http" + "strconv" "strings" ) func newMLUpdateTrainedModelDeploymentFunc(t Transport) MLUpdateTrainedModelDeployment { - return func(body io.Reader, model_id string, o ...func(*MLUpdateTrainedModelDeploymentRequest)) (*Response, error) { - var r = MLUpdateTrainedModelDeploymentRequest{Body: body, ModelID: model_id} + return func(model_id string, o ...func(*MLUpdateTrainedModelDeploymentRequest)) (*Response, error) { + var r = MLUpdateTrainedModelDeploymentRequest{ModelID: model_id} for _, f := range o { f(&r) } @@ -45,10 +46,8 @@ func newMLUpdateTrainedModelDeploymentFunc(t Transport) MLUpdateTrainedModelDepl // MLUpdateTrainedModelDeployment - Updates certain properties of trained model deployment. // -// This API is beta. -// // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/current/update-trained-model-deployment.html. 
-type MLUpdateTrainedModelDeployment func(body io.Reader, model_id string, o ...func(*MLUpdateTrainedModelDeploymentRequest)) (*Response, error) +type MLUpdateTrainedModelDeployment func(model_id string, o ...func(*MLUpdateTrainedModelDeploymentRequest)) (*Response, error) // MLUpdateTrainedModelDeploymentRequest configures the ML Update Trained Model Deployment API request. type MLUpdateTrainedModelDeploymentRequest struct { @@ -56,6 +55,8 @@ type MLUpdateTrainedModelDeploymentRequest struct { ModelID string + NumberOfAllocations *int + Pretty bool Human bool ErrorTrace bool @@ -105,6 +106,10 @@ func (r MLUpdateTrainedModelDeploymentRequest) Do(providedCtx context.Context, t params = make(map[string]string) + if r.NumberOfAllocations != nil { + params["number_of_allocations"] = strconv.FormatInt(int64(*r.NumberOfAllocations), 10) + } + if r.Pretty { params["pretty"] = "true" } @@ -190,6 +195,20 @@ func (f MLUpdateTrainedModelDeployment) WithContext(v context.Context) func(*MLU } } +// WithBody - The updated trained model deployment settings. +func (f MLUpdateTrainedModelDeployment) WithBody(v io.Reader) func(*MLUpdateTrainedModelDeploymentRequest) { + return func(r *MLUpdateTrainedModelDeploymentRequest) { + r.Body = v + } +} + +// WithNumberOfAllocations - update the model deployment to this number of allocations.. +func (f MLUpdateTrainedModelDeployment) WithNumberOfAllocations(v int) func(*MLUpdateTrainedModelDeploymentRequest) { + return func(r *MLUpdateTrainedModelDeploymentRequest) { + r.NumberOfAllocations = &v + } +} + // WithPretty makes the response body pretty-printed. func (f MLUpdateTrainedModelDeployment) WithPretty() func(*MLUpdateTrainedModelDeploymentRequest) { return func(r *MLUpdateTrainedModelDeploymentRequest) { diff --git a/esapi/api.xpack.ml.upgrade_job_snapshot.go b/esapi/api.xpack.ml.upgrade_job_snapshot.go index d143298b22..4b8eda6cca 100644 --- a/esapi/api.xpack.ml.upgrade_job_snapshot.go +++ b/esapi/api.xpack.ml.upgrade_job_snapshot.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.validate.go b/esapi/api.xpack.ml.validate.go index 7ecf3581f3..c37b8bfc54 100644 --- a/esapi/api.xpack.ml.validate.go +++ b/esapi/api.xpack.ml.validate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ml.validate_detector.go b/esapi/api.xpack.ml.validate_detector.go index e00686a755..8319e9b7d0 100644 --- a/esapi/api.xpack.ml.validate_detector.go +++ b/esapi/api.xpack.ml.validate_detector.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.monitoring.bulk.go b/esapi/api.xpack.monitoring.bulk.go index 1b8a7a9161..e118604c85 100644 --- a/esapi/api.xpack.monitoring.bulk.go +++ b/esapi/api.xpack.monitoring.bulk.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
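// Usage sketch (editorial annotation, not part of the generated diff): the hunk above
// makes the request body optional for ML Update Trained Model Deployment, leaving the
// model ID as the only positional argument, and adds a number_of_allocations
// parameter. The model ID and allocation count are illustrative, and the
// es.ML.UpdateTrainedModelDeployment accessor is assumed to follow the client's usual
// namespacing.
package main

import (
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// Scale the deployment without sending a body: the new option is rendered as
	// the number_of_allocations query parameter.
	res, err := es.ML.UpdateTrainedModelDeployment(
		"my-trained-model",
		es.ML.UpdateTrainedModelDeployment.WithNumberOfAllocations(2),
	)
	if err != nil {
		log.Fatalf("error performing the request: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.Status())
}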
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.open_point_in_time.go b/esapi/api.xpack.open_point_in_time.go index 4686175d83..1a655771c4 100644 --- a/esapi/api.xpack.open_point_in_time.go +++ b/esapi/api.xpack.open_point_in_time.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.profiling.flamegraph.go b/esapi/api.xpack.profiling.flamegraph.go index 65192cd823..1a32904ad3 100644 --- a/esapi/api.xpack.profiling.flamegraph.go +++ b/esapi/api.xpack.profiling.flamegraph.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.delete_job.go b/esapi/api.xpack.rollup.delete_job.go index 3cbda6e7b7..e0a42cd65c 100644 --- a/esapi/api.xpack.rollup.delete_job.go +++ b/esapi/api.xpack.rollup.delete_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.get_jobs.go b/esapi/api.xpack.rollup.get_jobs.go index 58948907ad..037dd62d0f 100644 --- a/esapi/api.xpack.rollup.get_jobs.go +++ b/esapi/api.xpack.rollup.get_jobs.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.get_rollup_caps.go b/esapi/api.xpack.rollup.get_rollup_caps.go index 47764467d3..38d827dd04 100644 --- a/esapi/api.xpack.rollup.get_rollup_caps.go +++ b/esapi/api.xpack.rollup.get_rollup_caps.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.get_rollup_index_caps.go b/esapi/api.xpack.rollup.get_rollup_index_caps.go index c20ec1276b..3c45fd0138 100644 --- a/esapi/api.xpack.rollup.get_rollup_index_caps.go +++ b/esapi/api.xpack.rollup.get_rollup_index_caps.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.put_job.go b/esapi/api.xpack.rollup.put_job.go index 8e0bd10943..62242b5f7b 100644 --- a/esapi/api.xpack.rollup.put_job.go +++ b/esapi/api.xpack.rollup.put_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.rollup_search.go b/esapi/api.xpack.rollup.rollup_search.go index b8503737c2..524c36bcc0 100644 --- a/esapi/api.xpack.rollup.rollup_search.go +++ b/esapi/api.xpack.rollup.rollup_search.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.start_job.go b/esapi/api.xpack.rollup.start_job.go index 794764e4ba..2a4ca057c7 100644 --- a/esapi/api.xpack.rollup.start_job.go +++ b/esapi/api.xpack.rollup.start_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.rollup.stop_job.go b/esapi/api.xpack.rollup.stop_job.go index 46bbbe00b2..47cad52c6b 100644 --- a/esapi/api.xpack.rollup.stop_job.go +++ b/esapi/api.xpack.rollup.stop_job.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.searchable_snapshots.cache_stats.go b/esapi/api.xpack.searchable_snapshots.cache_stats.go index 3c2b550564..d9c7f301d9 100644 --- a/esapi/api.xpack.searchable_snapshots.cache_stats.go +++ b/esapi/api.xpack.searchable_snapshots.cache_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.searchable_snapshots.clear_cache.go b/esapi/api.xpack.searchable_snapshots.clear_cache.go index 51d7a8f5b9..2c69b3c1be 100644 --- a/esapi/api.xpack.searchable_snapshots.clear_cache.go +++ b/esapi/api.xpack.searchable_snapshots.clear_cache.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.searchable_snapshots.mount.go b/esapi/api.xpack.searchable_snapshots.mount.go index 20fc6cffaf..5f8c046f15 100644 --- a/esapi/api.xpack.searchable_snapshots.mount.go +++ b/esapi/api.xpack.searchable_snapshots.mount.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.searchable_snapshots.stats.go b/esapi/api.xpack.searchable_snapshots.stats.go index 5a664764eb..2ea0c995e0 100644 --- a/esapi/api.xpack.searchable_snapshots.stats.go +++ b/esapi/api.xpack.searchable_snapshots.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.activate_user_profile.go b/esapi/api.xpack.security.activate_user_profile.go index e7f8976d8b..e1b41309eb 100644 --- a/esapi/api.xpack.security.activate_user_profile.go +++ b/esapi/api.xpack.security.activate_user_profile.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.authenticate.go b/esapi/api.xpack.security.authenticate.go index 08b89392f5..ed35cc9754 100644 --- a/esapi/api.xpack.security.authenticate.go +++ b/esapi/api.xpack.security.authenticate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.bulk_put_role.go b/esapi/api.xpack.security.bulk_put_role.go new file mode 100644 index 0000000000..1d410ccf38 --- /dev/null +++ b/esapi/api.xpack.security.bulk_put_role.go @@ -0,0 +1,238 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.15.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newSecurityBulkPutRoleFunc(t Transport) SecurityBulkPutRole { + return func(body io.Reader, o ...func(*SecurityBulkPutRoleRequest)) (*Response, error) { + var r = SecurityBulkPutRoleRequest{Body: body} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// SecurityBulkPutRole - Bulk adds and updates roles in the native realm. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-bulk-put-role.html. +type SecurityBulkPutRole func(body io.Reader, o ...func(*SecurityBulkPutRoleRequest)) (*Response, error) + +// SecurityBulkPutRoleRequest configures the Security Bulk Put Role API request. +type SecurityBulkPutRoleRequest struct { + Body io.Reader + + Refresh string + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
+func (r SecurityBulkPutRoleRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "security.bulk_put_role") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "POST" + + path.Grow(7 + len("/_security/role")) + path.WriteString("http://") + path.WriteString("/_security/role") + + params = make(map[string]string) + + if r.Refresh != "" { + params["refresh"] = r.Refresh + } + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "security.bulk_put_role") + if reader := instrument.RecordRequestBody(ctx, "security.bulk_put_role", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "security.bulk_put_role") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f SecurityBulkPutRole) WithContext(v context.Context) func(*SecurityBulkPutRoleRequest) { + return func(r *SecurityBulkPutRoleRequest) { + r.ctx = v + } +} + +// WithRefresh - if `true` (the default) then refresh the affected shards to make this operation visible to search, if `wait_for` then wait for a refresh to make this operation visible to search, if `false` then do nothing with refreshes.. +func (f SecurityBulkPutRole) WithRefresh(v string) func(*SecurityBulkPutRoleRequest) { + return func(r *SecurityBulkPutRoleRequest) { + r.Refresh = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f SecurityBulkPutRole) WithPretty() func(*SecurityBulkPutRoleRequest) { + return func(r *SecurityBulkPutRoleRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f SecurityBulkPutRole) WithHuman() func(*SecurityBulkPutRoleRequest) { + return func(r *SecurityBulkPutRoleRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. 
+func (f SecurityBulkPutRole) WithErrorTrace() func(*SecurityBulkPutRoleRequest) { + return func(r *SecurityBulkPutRoleRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f SecurityBulkPutRole) WithFilterPath(v ...string) func(*SecurityBulkPutRoleRequest) { + return func(r *SecurityBulkPutRoleRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f SecurityBulkPutRole) WithHeader(h map[string]string) func(*SecurityBulkPutRoleRequest) { + return func(r *SecurityBulkPutRoleRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f SecurityBulkPutRole) WithOpaqueID(s string) func(*SecurityBulkPutRoleRequest) { + return func(r *SecurityBulkPutRoleRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.xpack.security.bulk_update_api_keys.go b/esapi/api.xpack.security.bulk_update_api_keys.go index 16248fbd81..f69330cda4 100644 --- a/esapi/api.xpack.security.bulk_update_api_keys.go +++ b/esapi/api.xpack.security.bulk_update_api_keys.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.change_password.go b/esapi/api.xpack.security.change_password.go index a8595f4615..37ad40309b 100644 --- a/esapi/api.xpack.security.change_password.go +++ b/esapi/api.xpack.security.change_password.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.clear_api_key_cache.go b/esapi/api.xpack.security.clear_api_key_cache.go index 62e0c7eaef..297abf3268 100644 --- a/esapi/api.xpack.security.clear_api_key_cache.go +++ b/esapi/api.xpack.security.clear_api_key_cache.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.clear_cached_privileges.go b/esapi/api.xpack.security.clear_cached_privileges.go index 9e167dc9d2..ef16182315 100644 --- a/esapi/api.xpack.security.clear_cached_privileges.go +++ b/esapi/api.xpack.security.clear_cached_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.clear_cached_realms.go b/esapi/api.xpack.security.clear_cached_realms.go index c23acb9456..f5b8fad737 100644 --- a/esapi/api.xpack.security.clear_cached_realms.go +++ b/esapi/api.xpack.security.clear_cached_realms.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
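For the new bulk role endpoint above, a usage sketch assuming a configured *elasticsearch.Client named es; the role names, index patterns, and privileges are illustrative only:

package example

import (
	"log"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

// bulkPutRoles creates or updates several native-realm roles in one request.
func bulkPutRoles(es *elasticsearch.Client) {
	body := strings.NewReader(`{
	  "roles": {
	    "logs_reader":  { "indices": [ { "names": ["logs-*"], "privileges": ["read"] } ] },
	    "logs_indexer": { "indices": [ { "names": ["logs-*"], "privileges": ["create_doc"] } ] }
	  }
	}`)

	res, err := es.Security.BulkPutRole(
		body,
		es.Security.BulkPutRole.WithRefresh("wait_for"),
	)
	if err != nil {
		log.Fatalf("request failed: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.String())
}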
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.clear_cached_roles.go b/esapi/api.xpack.security.clear_cached_roles.go index 415ff4b535..9bac0d9a40 100644 --- a/esapi/api.xpack.security.clear_cached_roles.go +++ b/esapi/api.xpack.security.clear_cached_roles.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.clear_cached_service_tokens.go b/esapi/api.xpack.security.clear_cached_service_tokens.go index aedf4049e0..8e983af4ef 100644 --- a/esapi/api.xpack.security.clear_cached_service_tokens.go +++ b/esapi/api.xpack.security.clear_cached_service_tokens.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.create_api_key.go b/esapi/api.xpack.security.create_api_key.go index 6fce608d95..0283914d77 100644 --- a/esapi/api.xpack.security.create_api_key.go +++ b/esapi/api.xpack.security.create_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.create_cross_cluster_api_key.go b/esapi/api.xpack.security.create_cross_cluster_api_key.go index c01210e2f2..626e9625ac 100644 --- a/esapi/api.xpack.security.create_cross_cluster_api_key.go +++ b/esapi/api.xpack.security.create_cross_cluster_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.create_service_token.go b/esapi/api.xpack.security.create_service_token.go index 46dd57afa9..5dad9acbc4 100644 --- a/esapi/api.xpack.security.create_service_token.go +++ b/esapi/api.xpack.security.create_service_token.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -84,7 +84,11 @@ func (r SecurityCreateServiceTokenRequest) Do(providedCtx context.Context, trans ctx = providedCtx } - method = "PUT" + if r.Name != "" { + method = "POST" + } else { + method = "PUT" + } path.Grow(7 + 1 + len("_security") + 1 + len("service") + 1 + len(r.Namespace) + 1 + len(r.Service) + 1 + len("credential") + 1 + len("token") + 1 + len(r.Name)) path.WriteString("http://") diff --git a/esapi/api.xpack.security.delete_privileges.go b/esapi/api.xpack.security.delete_privileges.go index 40d73f0a64..75c025d249 100644 --- a/esapi/api.xpack.security.delete_privileges.go +++ b/esapi/api.xpack.security.delete_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.delete_role.go b/esapi/api.xpack.security.delete_role.go index 951d68c534..9c883ec9cd 100644 --- a/esapi/api.xpack.security.delete_role.go +++ b/esapi/api.xpack.security.delete_role.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.delete_role_mapping.go b/esapi/api.xpack.security.delete_role_mapping.go index 5461df38da..ca363013a9 100644 --- a/esapi/api.xpack.security.delete_role_mapping.go +++ b/esapi/api.xpack.security.delete_role_mapping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.delete_service_token.go b/esapi/api.xpack.security.delete_service_token.go index 9b7ee6e079..86458a8876 100644 --- a/esapi/api.xpack.security.delete_service_token.go +++ b/esapi/api.xpack.security.delete_service_token.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.delete_user.go b/esapi/api.xpack.security.delete_user.go index dd2478a6d0..c00b5e536a 100644 --- a/esapi/api.xpack.security.delete_user.go +++ b/esapi/api.xpack.security.delete_user.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.disable_user.go b/esapi/api.xpack.security.disable_user.go index 93a443e592..327438caea 100644 --- a/esapi/api.xpack.security.disable_user.go +++ b/esapi/api.xpack.security.disable_user.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.disable_user_profile.go b/esapi/api.xpack.security.disable_user_profile.go index 3689b7316c..d32cb442d9 100644 --- a/esapi/api.xpack.security.disable_user_profile.go +++ b/esapi/api.xpack.security.disable_user_profile.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.enable_user.go b/esapi/api.xpack.security.enable_user.go index cfd28d8af0..a61f9cdd4a 100644 --- a/esapi/api.xpack.security.enable_user.go +++ b/esapi/api.xpack.security.enable_user.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.enable_user_profile.go b/esapi/api.xpack.security.enable_user_profile.go index 4f183a812e..b77ea7e711 100644 --- a/esapi/api.xpack.security.enable_user_profile.go +++ b/esapi/api.xpack.security.enable_user_profile.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.enroll_kibana.go b/esapi/api.xpack.security.enroll_kibana.go index 9a9559eef1..8d22411504 100644 --- a/esapi/api.xpack.security.enroll_kibana.go +++ b/esapi/api.xpack.security.enroll_kibana.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.enroll_node.go b/esapi/api.xpack.security.enroll_node.go index b3ccaa7833..43f32e3b2c 100644 --- a/esapi/api.xpack.security.enroll_node.go +++ b/esapi/api.xpack.security.enroll_node.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_api_key.go b/esapi/api.xpack.security.get_api_key.go index 64c7b0e4fa..32c2df2479 100644 --- a/esapi/api.xpack.security.get_api_key.go +++ b/esapi/api.xpack.security.get_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -50,13 +50,14 @@ type SecurityGetAPIKey func(o ...func(*SecurityGetAPIKeyRequest)) (*Response, er // SecurityGetAPIKeyRequest configures the Security GetAPI Key API request. type SecurityGetAPIKeyRequest struct { - ActiveOnly *bool - ID string - Name string - Owner *bool - RealmName string - Username string - WithLimitedBy *bool + ActiveOnly *bool + ID string + Name string + Owner *bool + RealmName string + Username string + WithLimitedBy *bool + WithProfileUID *bool Pretty bool Human bool @@ -123,6 +124,10 @@ func (r SecurityGetAPIKeyRequest) Do(providedCtx context.Context, transport Tran params["with_limited_by"] = strconv.FormatBool(*r.WithLimitedBy) } + if r.WithProfileUID != nil { + params["with_profile_uid"] = strconv.FormatBool(*r.WithProfileUID) + } + if r.Pretty { params["pretty"] = "true" } @@ -250,6 +255,13 @@ func (f SecurityGetAPIKey) WithWithLimitedBy(v bool) func(*SecurityGetAPIKeyRequ } } +// WithWithProfileUID - flag to also retrieve the api key's owner profile uid, if it exists. +func (f SecurityGetAPIKey) WithWithProfileUID(v bool) func(*SecurityGetAPIKeyRequest) { + return func(r *SecurityGetAPIKeyRequest) { + r.WithProfileUID = &v + } +} + // WithPretty makes the response body pretty-printed. 
func (f SecurityGetAPIKey) WithPretty() func(*SecurityGetAPIKeyRequest) { return func(r *SecurityGetAPIKeyRequest) { diff --git a/esapi/api.xpack.security.get_builtin_privileges.go b/esapi/api.xpack.security.get_builtin_privileges.go index 150e358a2a..9031a8b15a 100644 --- a/esapi/api.xpack.security.get_builtin_privileges.go +++ b/esapi/api.xpack.security.get_builtin_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_privileges.go b/esapi/api.xpack.security.get_privileges.go index 2166306581..6c92241fa7 100644 --- a/esapi/api.xpack.security.get_privileges.go +++ b/esapi/api.xpack.security.get_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_role.go b/esapi/api.xpack.security.get_role.go index a99ab5d669..c25cc8a7be 100644 --- a/esapi/api.xpack.security.get_role.go +++ b/esapi/api.xpack.security.get_role.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_role_mapping.go b/esapi/api.xpack.security.get_role_mapping.go index c0299e0e70..42d4756e56 100644 --- a/esapi/api.xpack.security.get_role_mapping.go +++ b/esapi/api.xpack.security.get_role_mapping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_service_accounts.go b/esapi/api.xpack.security.get_service_accounts.go index f8d6dd6b1e..d3373e45a3 100644 --- a/esapi/api.xpack.security.get_service_accounts.go +++ b/esapi/api.xpack.security.get_service_accounts.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_service_credentials.go b/esapi/api.xpack.security.get_service_credentials.go index e44d76cef2..f84c8a4122 100644 --- a/esapi/api.xpack.security.get_service_credentials.go +++ b/esapi/api.xpack.security.get_service_credentials.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_settings.go b/esapi/api.xpack.security.get_settings.go index 305762deec..6646256842 100644 --- a/esapi/api.xpack.security.get_settings.go +++ b/esapi/api.xpack.security.get_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
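The Get API key request above gains a with_profile_uid flag; note that the generated option doubles the With prefix (WithWithProfileUID). A sketch, assuming a configured *elasticsearch.Client named es:

package example

import (
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

// getOwnAPIKeys lists the calling user's API keys and asks Elasticsearch to
// include each owner's profile UID via the new with_profile_uid parameter.
func getOwnAPIKeys(es *elasticsearch.Client) {
	res, err := es.Security.GetAPIKey(
		es.Security.GetAPIKey.WithOwner(true),
		es.Security.GetAPIKey.WithWithProfileUID(true),
	)
	if err != nil {
		log.Fatalf("request failed: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.String())
}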
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newSecurityGetSettingsFunc(t Transport) SecurityGetSettings { @@ -49,6 +50,8 @@ type SecurityGetSettings func(o ...func(*SecurityGetSettingsRequest)) (*Response // SecurityGetSettingsRequest configures the Security Get Settings API request. type SecurityGetSettingsRequest struct { + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -86,6 +89,10 @@ func (r SecurityGetSettingsRequest) Do(providedCtx context.Context, transport Tr params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -164,6 +171,13 @@ func (f SecurityGetSettings) WithContext(v context.Context) func(*SecurityGetSet } } +// WithMasterTimeout - timeout for connection to master. +func (f SecurityGetSettings) WithMasterTimeout(v time.Duration) func(*SecurityGetSettingsRequest) { + return func(r *SecurityGetSettingsRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f SecurityGetSettings) WithPretty() func(*SecurityGetSettingsRequest) { return func(r *SecurityGetSettingsRequest) { diff --git a/esapi/api.xpack.security.get_token.go b/esapi/api.xpack.security.get_token.go index d04994024a..694b9f5bbc 100644 --- a/esapi/api.xpack.security.get_token.go +++ b/esapi/api.xpack.security.get_token.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_user.go b/esapi/api.xpack.security.get_user.go index cad6440667..f77081c002 100644 --- a/esapi/api.xpack.security.get_user.go +++ b/esapi/api.xpack.security.get_user.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_user_privileges.go b/esapi/api.xpack.security.get_user_privileges.go index 424eba52c3..991d0a1254 100644 --- a/esapi/api.xpack.security.get_user_privileges.go +++ b/esapi/api.xpack.security.get_user_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.get_user_profile.go b/esapi/api.xpack.security.get_user_profile.go index 9fdede7bb2..41a1331abc 100644 --- a/esapi/api.xpack.security.get_user_profile.go +++ b/esapi/api.xpack.security.get_user_profile.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.grant_api_key.go b/esapi/api.xpack.security.grant_api_key.go index 6e9994a760..6fa939574a 100644 --- a/esapi/api.xpack.security.grant_api_key.go +++ b/esapi/api.xpack.security.grant_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.has_privileges.go b/esapi/api.xpack.security.has_privileges.go index 2d619da2be..3556bdb063 100644 --- a/esapi/api.xpack.security.has_privileges.go +++ b/esapi/api.xpack.security.has_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.has_privileges_user_profile.go b/esapi/api.xpack.security.has_privileges_user_profile.go index dee1e51d0a..eb89dafda5 100644 --- a/esapi/api.xpack.security.has_privileges_user_profile.go +++ b/esapi/api.xpack.security.has_privileges_user_profile.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.invalidate_api_key.go b/esapi/api.xpack.security.invalidate_api_key.go index b04072abe7..927ad263f5 100644 --- a/esapi/api.xpack.security.invalidate_api_key.go +++ b/esapi/api.xpack.security.invalidate_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.invalidate_token.go b/esapi/api.xpack.security.invalidate_token.go index 5616273564..16c492fac5 100644 --- a/esapi/api.xpack.security.invalidate_token.go +++ b/esapi/api.xpack.security.invalidate_token.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.oidc_authenticate.go b/esapi/api.xpack.security.oidc_authenticate.go index 1c740bdb5b..a41284c811 100644 --- a/esapi/api.xpack.security.oidc_authenticate.go +++ b/esapi/api.xpack.security.oidc_authenticate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.oidc_logout.go b/esapi/api.xpack.security.oidc_logout.go index 81fa6e28a1..7fb1294bf6 100644 --- a/esapi/api.xpack.security.oidc_logout.go +++ b/esapi/api.xpack.security.oidc_logout.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.oidc_prepare_authentication.go b/esapi/api.xpack.security.oidc_prepare_authentication.go index ddec3997bc..104079f498 100644 --- a/esapi/api.xpack.security.oidc_prepare_authentication.go +++ b/esapi/api.xpack.security.oidc_prepare_authentication.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.put_privileges.go b/esapi/api.xpack.security.put_privileges.go index 2d955484c0..23c6303905 100644 --- a/esapi/api.xpack.security.put_privileges.go +++ b/esapi/api.xpack.security.put_privileges.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -85,9 +85,9 @@ func (r SecurityPutPrivilegesRequest) Do(providedCtx context.Context, transport method = "PUT" - path.Grow(7 + len("/_security/privilege/")) + path.Grow(7 + len("/_security/privilege")) path.WriteString("http://") - path.WriteString("/_security/privilege/") + path.WriteString("/_security/privilege") params = make(map[string]string) diff --git a/esapi/api.xpack.security.put_role.go b/esapi/api.xpack.security.put_role.go index 87ca84e05e..ad81b7eb12 100644 --- a/esapi/api.xpack.security.put_role.go +++ b/esapi/api.xpack.security.put_role.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.put_role_mapping.go b/esapi/api.xpack.security.put_role_mapping.go index f9313cee47..3bd2bd4dd1 100644 --- a/esapi/api.xpack.security.put_role_mapping.go +++ b/esapi/api.xpack.security.put_role_mapping.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.put_user.go b/esapi/api.xpack.security.put_user.go index 90ae9013c9..a155afb603 100644 --- a/esapi/api.xpack.security.put_user.go +++ b/esapi/api.xpack.security.put_user.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.query_api_keys.go b/esapi/api.xpack.security.query_api_keys.go index cd3da94c3f..fa26fce632 100644 --- a/esapi/api.xpack.security.query_api_keys.go +++ b/esapi/api.xpack.security.query_api_keys.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -53,7 +53,9 @@ type SecurityQueryAPIKeys func(o ...func(*SecurityQueryAPIKeysRequest)) (*Respon type SecurityQueryAPIKeysRequest struct { Body io.Reader - WithLimitedBy *bool + TypedKeys *bool + WithLimitedBy *bool + WithProfileUID *bool Pretty bool Human bool @@ -92,10 +94,18 @@ func (r SecurityQueryAPIKeysRequest) Do(providedCtx context.Context, transport T params = make(map[string]string) + if r.TypedKeys != nil { + params["typed_keys"] = strconv.FormatBool(*r.TypedKeys) + } + if r.WithLimitedBy != nil { params["with_limited_by"] = strconv.FormatBool(*r.WithLimitedBy) } + if r.WithProfileUID != nil { + params["with_profile_uid"] = strconv.FormatBool(*r.WithProfileUID) + } + if r.Pretty { params["pretty"] = "true" } @@ -188,6 +198,13 @@ func (f SecurityQueryAPIKeys) WithBody(v io.Reader) func(*SecurityQueryAPIKeysRe } } +// WithTypedKeys - flag to prefix aggregation names by their respective types in the response. +func (f SecurityQueryAPIKeys) WithTypedKeys(v bool) func(*SecurityQueryAPIKeysRequest) { + return func(r *SecurityQueryAPIKeysRequest) { + r.TypedKeys = &v + } +} + // WithWithLimitedBy - flag to show the limited-by role descriptors of api keys. func (f SecurityQueryAPIKeys) WithWithLimitedBy(v bool) func(*SecurityQueryAPIKeysRequest) { return func(r *SecurityQueryAPIKeysRequest) { @@ -195,6 +212,13 @@ func (f SecurityQueryAPIKeys) WithWithLimitedBy(v bool) func(*SecurityQueryAPIKe } } +// WithWithProfileUID - flag to also retrieve the api key's owner profile uid, if it exists. +func (f SecurityQueryAPIKeys) WithWithProfileUID(v bool) func(*SecurityQueryAPIKeysRequest) { + return func(r *SecurityQueryAPIKeysRequest) { + r.WithProfileUID = &v + } +} + // WithPretty makes the response body pretty-printed. func (f SecurityQueryAPIKeys) WithPretty() func(*SecurityQueryAPIKeysRequest) { return func(r *SecurityQueryAPIKeysRequest) { diff --git a/esapi/api.xpack.security.query_role.go b/esapi/api.xpack.security.query_role.go new file mode 100644 index 0000000000..8c35accc8e --- /dev/null +++ b/esapi/api.xpack.security.query_role.go @@ -0,0 +1,232 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
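The Query API keys request above picks up typed_keys and with_profile_uid flags. A sketch of using them together with an aggregation body, assuming a configured *elasticsearch.Client named es; the aggregation name and field are examples only:

package example

import (
	"log"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

// queryAPIKeysPerUser buckets API keys by owner and requests typed
// aggregation keys plus each owner's profile UID in the response.
func queryAPIKeysPerUser(es *elasticsearch.Client) {
	body := strings.NewReader(`{
	  "size": 0,
	  "aggs": { "keys_per_user": { "terms": { "field": "username" } } }
	}`)

	res, err := es.Security.QueryAPIKeys(
		es.Security.QueryAPIKeys.WithBody(body),
		es.Security.QueryAPIKeys.WithTypedKeys(true),
		es.Security.QueryAPIKeys.WithWithProfileUID(true),
	)
	if err != nil {
		log.Fatalf("request failed: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.String())
}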
+// +// Code generated from specification version 8.15.0: DO NOT EDIT + +package esapi + +import ( + "context" + "io" + "net/http" + "strings" +) + +func newSecurityQueryRoleFunc(t Transport) SecurityQueryRole { + return func(o ...func(*SecurityQueryRoleRequest)) (*Response, error) { + var r = SecurityQueryRoleRequest{} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// SecurityQueryRole - Retrieves information for Roles using a subset of query DSL +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-role.html. +type SecurityQueryRole func(o ...func(*SecurityQueryRoleRequest)) (*Response, error) + +// SecurityQueryRoleRequest configures the Security Query Role API request. +type SecurityQueryRoleRequest struct { + Body io.Reader + + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. +func (r SecurityQueryRoleRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "security.query_role") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "POST" + + path.Grow(7 + len("/_security/_query/role")) + path.WriteString("http://") + path.WriteString("/_security/_query/role") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), r.Body) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if r.Body != nil && req.Header.Get(headerContentType) == "" { + req.Header[headerContentType] = headerContentTypeJSON + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "security.query_role") + if reader := instrument.RecordRequestBody(ctx, "security.query_role", r.Body); reader != nil { + req.Body = reader + } + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "security.query_role") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. 
+func (f SecurityQueryRole) WithContext(v context.Context) func(*SecurityQueryRoleRequest) { + return func(r *SecurityQueryRoleRequest) { + r.ctx = v + } +} + +// WithBody - From, size, query, sort and search_after. +func (f SecurityQueryRole) WithBody(v io.Reader) func(*SecurityQueryRoleRequest) { + return func(r *SecurityQueryRoleRequest) { + r.Body = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f SecurityQueryRole) WithPretty() func(*SecurityQueryRoleRequest) { + return func(r *SecurityQueryRoleRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f SecurityQueryRole) WithHuman() func(*SecurityQueryRoleRequest) { + return func(r *SecurityQueryRoleRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f SecurityQueryRole) WithErrorTrace() func(*SecurityQueryRoleRequest) { + return func(r *SecurityQueryRoleRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f SecurityQueryRole) WithFilterPath(v ...string) func(*SecurityQueryRoleRequest) { + return func(r *SecurityQueryRoleRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f SecurityQueryRole) WithHeader(h map[string]string) func(*SecurityQueryRoleRequest) { + return func(r *SecurityQueryRoleRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. +func (f SecurityQueryRole) WithOpaqueID(s string) func(*SecurityQueryRoleRequest) { + return func(r *SecurityQueryRoleRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.xpack.security.query_user.go b/esapi/api.xpack.security.query_user.go index 99746bf7c1..aa31295a85 100644 --- a/esapi/api.xpack.security.query_user.go +++ b/esapi/api.xpack.security.query_user.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.saml_authenticate.go b/esapi/api.xpack.security.saml_authenticate.go index ba4d41fba5..ec89dc5249 100644 --- a/esapi/api.xpack.security.saml_authenticate.go +++ b/esapi/api.xpack.security.saml_authenticate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.saml_complete_logout.go b/esapi/api.xpack.security.saml_complete_logout.go index 4c62869ffa..78c887bdd7 100644 --- a/esapi/api.xpack.security.saml_complete_logout.go +++ b/esapi/api.xpack.security.saml_complete_logout.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
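A usage sketch for the new Query Role endpoint above, assuming a configured *elasticsearch.Client named es; the wildcard pattern and page size are illustrative:

package example

import (
	"log"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

// queryRolesByName searches roles whose name matches a pattern, sorted by
// name, via the new _security/_query/role endpoint.
func queryRolesByName(es *elasticsearch.Client) {
	body := strings.NewReader(`{
	  "query": { "wildcard": { "name": "logs_*" } },
	  "sort":  [ "name" ],
	  "size":  20
	}`)

	res, err := es.Security.QueryRole(
		es.Security.QueryRole.WithBody(body),
	)
	if err != nil {
		log.Fatalf("request failed: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.String())
}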
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.saml_invalidate.go b/esapi/api.xpack.security.saml_invalidate.go index 7941e86e8b..7907abe8e0 100644 --- a/esapi/api.xpack.security.saml_invalidate.go +++ b/esapi/api.xpack.security.saml_invalidate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.saml_logout.go b/esapi/api.xpack.security.saml_logout.go index 5f7b6e934d..7fa66cd55c 100644 --- a/esapi/api.xpack.security.saml_logout.go +++ b/esapi/api.xpack.security.saml_logout.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.saml_prepare_authentication.go b/esapi/api.xpack.security.saml_prepare_authentication.go index 3cb5e33751..06cf0b482e 100644 --- a/esapi/api.xpack.security.saml_prepare_authentication.go +++ b/esapi/api.xpack.security.saml_prepare_authentication.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.saml_service_provider_metadata.go b/esapi/api.xpack.security.saml_service_provider_metadata.go index 434b73ca79..99bd572179 100644 --- a/esapi/api.xpack.security.saml_service_provider_metadata.go +++ b/esapi/api.xpack.security.saml_service_provider_metadata.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.suggest_user_profiles.go b/esapi/api.xpack.security.suggest_user_profiles.go index 59e065d557..60203d7821 100644 --- a/esapi/api.xpack.security.suggest_user_profiles.go +++ b/esapi/api.xpack.security.suggest_user_profiles.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.update_api_key.go b/esapi/api.xpack.security.update_api_key.go index 175fb4b9d2..a9f39c49b9 100644 --- a/esapi/api.xpack.security.update_api_key.go +++ b/esapi/api.xpack.security.update_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.update_cross_cluster_api_key.go b/esapi/api.xpack.security.update_cross_cluster_api_key.go index 46f405f692..acce0439fa 100644 --- a/esapi/api.xpack.security.update_cross_cluster_api_key.go +++ b/esapi/api.xpack.security.update_cross_cluster_api_key.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.security.update_settings.go b/esapi/api.xpack.security.update_settings.go index 380fe76f2c..672ee890ab 100644 --- a/esapi/api.xpack.security.update_settings.go +++ b/esapi/api.xpack.security.update_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "io" "net/http" "strings" + "time" ) func newSecurityUpdateSettingsFunc(t Transport) SecurityUpdateSettings { @@ -52,6 +53,9 @@ type SecurityUpdateSettings func(body io.Reader, o ...func(*SecurityUpdateSettin type SecurityUpdateSettingsRequest struct { Body io.Reader + MasterTimeout time.Duration + Timeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -89,6 +93,14 @@ func (r SecurityUpdateSettingsRequest) Do(providedCtx context.Context, transport params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -174,6 +186,20 @@ func (f SecurityUpdateSettings) WithContext(v context.Context) func(*SecurityUpd } } +// WithMasterTimeout - timeout for connection to master. +func (f SecurityUpdateSettings) WithMasterTimeout(v time.Duration) func(*SecurityUpdateSettingsRequest) { + return func(r *SecurityUpdateSettingsRequest) { + r.MasterTimeout = v + } +} + +// WithTimeout - timeout for acknowledgements from all nodes. +func (f SecurityUpdateSettings) WithTimeout(v time.Duration) func(*SecurityUpdateSettingsRequest) { + return func(r *SecurityUpdateSettingsRequest) { + r.Timeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f SecurityUpdateSettings) WithPretty() func(*SecurityUpdateSettingsRequest) { return func(r *SecurityUpdateSettingsRequest) { diff --git a/esapi/api.xpack.security.update_user_profile_data.go b/esapi/api.xpack.security.update_user_profile_data.go index 4b387d5a11..699ea576a6 100644 --- a/esapi/api.xpack.security.update_user_profile_data.go +++ b/esapi/api.xpack.security.update_user_profile_data.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.delete_lifecycle.go b/esapi/api.xpack.slm.delete_lifecycle.go index 4065375fa6..8a0f9c4e76 100644 --- a/esapi/api.xpack.slm.delete_lifecycle.go +++ b/esapi/api.xpack.slm.delete_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
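With the master_timeout and timeout parameters added to the security Update Settings request above, a sketch assuming a configured *elasticsearch.Client named es; the settings body and durations are placeholders:

package example

import (
	"log"
	"strings"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
)

// updateSecuritySettings tweaks a security system-index setting with explicit
// master and acknowledgement timeouts.
func updateSecuritySettings(es *elasticsearch.Client) {
	body := strings.NewReader(`{
	  "security": { "index.auto_expand_replicas": "0-all" }
	}`)

	res, err := es.Security.UpdateSettings(
		body,
		es.Security.UpdateSettings.WithMasterTimeout(30*time.Second),
		es.Security.UpdateSettings.WithTimeout(30*time.Second),
	)
	if err != nil {
		log.Fatalf("request failed: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.String())
}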
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.execute_lifecycle.go b/esapi/api.xpack.slm.execute_lifecycle.go index 8075665e5c..e01db94a16 100644 --- a/esapi/api.xpack.slm.execute_lifecycle.go +++ b/esapi/api.xpack.slm.execute_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.execute_retention.go b/esapi/api.xpack.slm.execute_retention.go index f3c3cc3c43..57f67aa388 100644 --- a/esapi/api.xpack.slm.execute_retention.go +++ b/esapi/api.xpack.slm.execute_retention.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.get_lifecycle.go b/esapi/api.xpack.slm.get_lifecycle.go index c5352af2d1..b6bbe74707 100644 --- a/esapi/api.xpack.slm.get_lifecycle.go +++ b/esapi/api.xpack.slm.get_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.get_stats.go b/esapi/api.xpack.slm.get_stats.go index 266b886170..7f5dbe6151 100644 --- a/esapi/api.xpack.slm.get_stats.go +++ b/esapi/api.xpack.slm.get_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.get_status.go b/esapi/api.xpack.slm.get_status.go index 2bb8dab6fe..5698accf81 100644 --- a/esapi/api.xpack.slm.get_status.go +++ b/esapi/api.xpack.slm.get_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.put_lifecycle.go b/esapi/api.xpack.slm.put_lifecycle.go index d0af4bec3a..bf9cc75ed2 100644 --- a/esapi/api.xpack.slm.put_lifecycle.go +++ b/esapi/api.xpack.slm.put_lifecycle.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.slm.start.go b/esapi/api.xpack.slm.start.go index 0a96dcfa03..9e3d5a4f80 100644 --- a/esapi/api.xpack.slm.start.go +++ b/esapi/api.xpack.slm.start.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newSlmStartFunc(t Transport) SlmStart { @@ -49,6 +50,9 @@ type SlmStart func(o ...func(*SlmStartRequest)) (*Response, error) // SlmStartRequest configures the Slm Start API request. type SlmStartRequest struct { + MasterTimeout time.Duration + Timeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -86,6 +90,14 @@ func (r SlmStartRequest) Do(providedCtx context.Context, transport Transport) (* params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -164,6 +176,20 @@ func (f SlmStart) WithContext(v context.Context) func(*SlmStartRequest) { } } +// WithMasterTimeout - timeout for processing on master node. +func (f SlmStart) WithMasterTimeout(v time.Duration) func(*SlmStartRequest) { + return func(r *SlmStartRequest) { + r.MasterTimeout = v + } +} + +// WithTimeout - timeout for acknowledgement of update from all nodes in cluster. +func (f SlmStart) WithTimeout(v time.Duration) func(*SlmStartRequest) { + return func(r *SlmStartRequest) { + r.Timeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f SlmStart) WithPretty() func(*SlmStartRequest) { return func(r *SlmStartRequest) { diff --git a/esapi/api.xpack.slm.stop.go b/esapi/api.xpack.slm.stop.go index 512e139da2..1275af8398 100644 --- a/esapi/api.xpack.slm.stop.go +++ b/esapi/api.xpack.slm.stop.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newSlmStopFunc(t Transport) SlmStop { @@ -49,6 +50,9 @@ type SlmStop func(o ...func(*SlmStopRequest)) (*Response, error) // SlmStopRequest configures the Slm Stop API request. type SlmStopRequest struct { + MasterTimeout time.Duration + Timeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -86,6 +90,14 @@ func (r SlmStopRequest) Do(providedCtx context.Context, transport Transport) (*R params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -164,6 +176,20 @@ func (f SlmStop) WithContext(v context.Context) func(*SlmStopRequest) { } } +// WithMasterTimeout - timeout for processing on master node. +func (f SlmStop) WithMasterTimeout(v time.Duration) func(*SlmStopRequest) { + return func(r *SlmStopRequest) { + r.MasterTimeout = v + } +} + +// WithTimeout - timeout for acknowledgement of update from all nodes in cluster. +func (f SlmStop) WithTimeout(v time.Duration) func(*SlmStopRequest) { + return func(r *SlmStopRequest) { + r.Timeout = v + } +} + // WithPretty makes the response body pretty-printed. 
func (f SlmStop) WithPretty() func(*SlmStopRequest) { return func(r *SlmStopRequest) { diff --git a/esapi/api.xpack.sql.clear_cursor.go b/esapi/api.xpack.sql.clear_cursor.go index 647d8d47fd..675be47472 100644 --- a/esapi/api.xpack.sql.clear_cursor.go +++ b/esapi/api.xpack.sql.clear_cursor.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.sql.delete_async.go b/esapi/api.xpack.sql.delete_async.go index 81596101b7..f3578c3cbe 100644 --- a/esapi/api.xpack.sql.delete_async.go +++ b/esapi/api.xpack.sql.delete_async.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.sql.get_async.go b/esapi/api.xpack.sql.get_async.go index 7e8e14a7e8..5cfa1d7701 100644 --- a/esapi/api.xpack.sql.get_async.go +++ b/esapi/api.xpack.sql.get_async.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.sql.get_async_status.go b/esapi/api.xpack.sql.get_async_status.go index dc77e73f65..88e9dae041 100644 --- a/esapi/api.xpack.sql.get_async_status.go +++ b/esapi/api.xpack.sql.get_async_status.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.sql.query.go b/esapi/api.xpack.sql.query.go index f80ab0d02d..050afc16ab 100644 --- a/esapi/api.xpack.sql.query.go +++ b/esapi/api.xpack.sql.query.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.sql.translate.go b/esapi/api.xpack.sql.translate.go index 72bd212ced..0ca4ed9d1e 100644 --- a/esapi/api.xpack.sql.translate.go +++ b/esapi/api.xpack.sql.translate.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.ssl.certificates.go b/esapi/api.xpack.ssl.certificates.go index 852121348d..c20d3bc296 100644 --- a/esapi/api.xpack.ssl.certificates.go +++ b/esapi/api.xpack.ssl.certificates.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
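The SlmStart and SlmStop hunks earlier in this change add MasterTimeout and Timeout request fields that are rendered as the master_timeout and timeout query parameters. A minimal sketch of setting them through the request struct; the client construction and the 30-second values are illustrative, not taken from this change:

package main

import (
	"context"
	"log"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// The new fields become the master_timeout and timeout query-string
	// parameters when Do() builds the request.
	req := esapi.SlmStartRequest{
		MasterTimeout: 30 * time.Second, // illustrative value
		Timeout:       30 * time.Second, // illustrative value
	}

	res, err := req.Do(context.Background(), es)
	if err != nil {
		log.Fatalf("error starting SLM: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.Status())
}

SlmStopRequest accepts the same two fields in this change.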
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.text_structure.find_field_structure.go b/esapi/api.xpack.text_structure.find_field_structure.go index 277e58a985..3c9788c94a 100644 --- a/esapi/api.xpack.text_structure.find_field_structure.go +++ b/esapi/api.xpack.text_structure.find_field_structure.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -29,7 +29,7 @@ import ( func newTextStructureFindFieldStructureFunc(t Transport) TextStructureFindFieldStructure { return func(field string, index string, o ...func(*TextStructureFindFieldStructureRequest)) (*Response, error) { - var r = TextStructureFindFieldStructureRequest{Field: field, Index: index} + var r = TextStructureFindFieldStructureRequest{Index: index, Field: field} for _, f := range o { f(&r) } @@ -47,7 +47,7 @@ func newTextStructureFindFieldStructureFunc(t Transport) TextStructureFindFieldS // TextStructureFindFieldStructure - Finds the structure of a text field in an index. // // See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/current/find-field-structure.html. -type TextStructureFindFieldStructure func(field string, index string, o ...func(*TextStructureFindFieldStructureRequest)) (*Response, error) +type TextStructureFindFieldStructure func(index string, field string, o ...func(*TextStructureFindFieldStructureRequest)) (*Response, error) // TextStructureFindFieldStructureRequest configures the Text Structure Find Field Structure API request. type TextStructureFindFieldStructureRequest struct { diff --git a/esapi/api.xpack.text_structure.find_message_structure.go b/esapi/api.xpack.text_structure.find_message_structure.go index 397c29aad4..d2d30b3b9d 100644 --- a/esapi/api.xpack.text_structure.find_message_structure.go +++ b/esapi/api.xpack.text_structure.find_message_structure.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.text_structure.find_structure.go b/esapi/api.xpack.text_structure.find_structure.go index 9862bb680f..1bebb32da9 100644 --- a/esapi/api.xpack.text_structure.find_structure.go +++ b/esapi/api.xpack.text_structure.find_structure.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.text_structure.test_grok_pattern.go b/esapi/api.xpack.text_structure.test_grok_pattern.go index 93b2ce78db..03db27f3ee 100644 --- a/esapi/api.xpack.text_structure.test_grok_pattern.go +++ b/esapi/api.xpack.text_structure.test_grok_pattern.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
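The find_field_structure hunk above swaps the positional arguments so that index now precedes field. A sketch of calling the endpoint through the request struct, which uses named fields and so does not depend on the positional order; the index and field names are placeholders:

package main

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// Named fields sidestep the index/field ordering change.
	req := esapi.TextStructureFindFieldStructureRequest{
		Index: "my-logs", // placeholder index name
		Field: "message", // placeholder field name
	}

	res, err := req.Do(context.Background(), es)
	if err != nil {
		log.Fatalf("error finding field structure: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.String())
}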
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.delete_transform.go b/esapi/api.xpack.transform.delete_transform.go index 8e6d94a484..96864b4fce 100644 --- a/esapi/api.xpack.transform.delete_transform.go +++ b/esapi/api.xpack.transform.delete_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.get_node_stats.go b/esapi/api.xpack.transform.get_node_stats.go new file mode 100644 index 0000000000..9f4b6519e6 --- /dev/null +++ b/esapi/api.xpack.transform.get_node_stats.go @@ -0,0 +1,215 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +// +// Code generated from specification version 8.15.0: DO NOT EDIT + +package esapi + +import ( + "context" + "net/http" + "strings" +) + +func newTransformGetNodeStatsFunc(t Transport) TransformGetNodeStats { + return func(o ...func(*TransformGetNodeStatsRequest)) (*Response, error) { + var r = TransformGetNodeStatsRequest{} + for _, f := range o { + f(&r) + } + + if transport, ok := t.(Instrumented); ok { + r.instrument = transport.InstrumentationEnabled() + } + + return r.Do(r.ctx, t) + } +} + +// ----- API Definition ------------------------------------------------------- + +// TransformGetNodeStats - Retrieves transform usage information for transform nodes. +// +// See full documentation at https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-node-stats.html. +type TransformGetNodeStats func(o ...func(*TransformGetNodeStatsRequest)) (*Response, error) + +// TransformGetNodeStatsRequest configures the Transform Get Node Stats API request. +type TransformGetNodeStatsRequest struct { + Pretty bool + Human bool + ErrorTrace bool + FilterPath []string + + Header http.Header + + ctx context.Context + + instrument Instrumentation +} + +// Do executes the request and returns response or error. 
+func (r TransformGetNodeStatsRequest) Do(providedCtx context.Context, transport Transport) (*Response, error) { + var ( + method string + path strings.Builder + params map[string]string + ctx context.Context + ) + + if instrument, ok := r.instrument.(Instrumentation); ok { + ctx = instrument.Start(providedCtx, "transform.get_node_stats") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + method = "GET" + + path.Grow(7 + len("/_transform/_node_stats")) + path.WriteString("http://") + path.WriteString("/_transform/_node_stats") + + params = make(map[string]string) + + if r.Pretty { + params["pretty"] = "true" + } + + if r.Human { + params["human"] = "true" + } + + if r.ErrorTrace { + params["error_trace"] = "true" + } + + if len(r.FilterPath) > 0 { + params["filter_path"] = strings.Join(r.FilterPath, ",") + } + + req, err := newRequest(method, path.String(), nil) + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if len(params) > 0 { + q := req.URL.Query() + for k, v := range params { + q.Set(k, v) + } + req.URL.RawQuery = q.Encode() + } + + if len(r.Header) > 0 { + if len(req.Header) == 0 { + req.Header = r.Header + } else { + for k, vv := range r.Header { + for _, v := range vv { + req.Header.Add(k, v) + } + } + } + } + + if ctx != nil { + req = req.WithContext(ctx) + } + + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.BeforeRequest(req, "transform.get_node_stats") + } + res, err := transport.Perform(req) + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "transform.get_node_stats") + } + if err != nil { + if instrument, ok := r.instrument.(Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + response := Response{ + StatusCode: res.StatusCode, + Body: res.Body, + Header: res.Header, + } + + return &response, nil +} + +// WithContext sets the request context. +func (f TransformGetNodeStats) WithContext(v context.Context) func(*TransformGetNodeStatsRequest) { + return func(r *TransformGetNodeStatsRequest) { + r.ctx = v + } +} + +// WithPretty makes the response body pretty-printed. +func (f TransformGetNodeStats) WithPretty() func(*TransformGetNodeStatsRequest) { + return func(r *TransformGetNodeStatsRequest) { + r.Pretty = true + } +} + +// WithHuman makes statistical values human-readable. +func (f TransformGetNodeStats) WithHuman() func(*TransformGetNodeStatsRequest) { + return func(r *TransformGetNodeStatsRequest) { + r.Human = true + } +} + +// WithErrorTrace includes the stack trace for errors in the response body. +func (f TransformGetNodeStats) WithErrorTrace() func(*TransformGetNodeStatsRequest) { + return func(r *TransformGetNodeStatsRequest) { + r.ErrorTrace = true + } +} + +// WithFilterPath filters the properties of the response body. +func (f TransformGetNodeStats) WithFilterPath(v ...string) func(*TransformGetNodeStatsRequest) { + return func(r *TransformGetNodeStatsRequest) { + r.FilterPath = v + } +} + +// WithHeader adds the headers to the HTTP request. +func (f TransformGetNodeStats) WithHeader(h map[string]string) func(*TransformGetNodeStatsRequest) { + return func(r *TransformGetNodeStatsRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + for k, v := range h { + r.Header.Add(k, v) + } + } +} + +// WithOpaqueID adds the X-Opaque-Id header to the HTTP request. 
+func (f TransformGetNodeStats) WithOpaqueID(s string) func(*TransformGetNodeStatsRequest) { + return func(r *TransformGetNodeStatsRequest) { + if r.Header == nil { + r.Header = make(http.Header) + } + r.Header.Set("X-Opaque-Id", s) + } +} diff --git a/esapi/api.xpack.transform.get_transform.go b/esapi/api.xpack.transform.get_transform.go index 58c370390f..ad1a8f738f 100644 --- a/esapi/api.xpack.transform.get_transform.go +++ b/esapi/api.xpack.transform.get_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.get_transform_stats.go b/esapi/api.xpack.transform.get_transform_stats.go index 179d59180e..8f523fc9a6 100644 --- a/esapi/api.xpack.transform.get_transform_stats.go +++ b/esapi/api.xpack.transform.get_transform_stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.preview_transform.go b/esapi/api.xpack.transform.preview_transform.go index 5e48c742cb..ea7a0622e1 100644 --- a/esapi/api.xpack.transform.preview_transform.go +++ b/esapi/api.xpack.transform.preview_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.put_transform.go b/esapi/api.xpack.transform.put_transform.go index cff70888b8..6b4efd5f65 100644 --- a/esapi/api.xpack.transform.put_transform.go +++ b/esapi/api.xpack.transform.put_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.reset_transform.go b/esapi/api.xpack.transform.reset_transform.go index 43278dc9bc..a64e0d3cdb 100644 --- a/esapi/api.xpack.transform.reset_transform.go +++ b/esapi/api.xpack.transform.reset_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.schedule_now_transform.go b/esapi/api.xpack.transform.schedule_now_transform.go index 4031fc5c5e..aaf069cf77 100644 --- a/esapi/api.xpack.transform.schedule_now_transform.go +++ b/esapi/api.xpack.transform.schedule_now_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
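The new transform.get_node_stats endpoint added above takes no positional arguments. A minimal sketch of invoking it via the request struct; the client construction is illustrative, and the Human/Pretty flags are optional parameters shown in the generated code:

package main

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// Issues GET /_transform/_node_stats.
	req := esapi.TransformGetNodeStatsRequest{
		Human:  true, // human-readable statistical values
		Pretty: true, // pretty-printed response body
	}

	res, err := req.Do(context.Background(), es)
	if err != nil {
		log.Fatalf("error fetching transform node stats: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.String())
}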
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.start_transform.go b/esapi/api.xpack.transform.start_transform.go index 6419026f6c..e7a412d4a2 100644 --- a/esapi/api.xpack.transform.start_transform.go +++ b/esapi/api.xpack.transform.start_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.stop_transform.go b/esapi/api.xpack.transform.stop_transform.go index 24c4097914..6a56235de0 100644 --- a/esapi/api.xpack.transform.stop_transform.go +++ b/esapi/api.xpack.transform.stop_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.update_transform.go b/esapi/api.xpack.transform.update_transform.go index 9e40e9f5c7..919bd49888 100644 --- a/esapi/api.xpack.transform.update_transform.go +++ b/esapi/api.xpack.transform.update_transform.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.transform.upgrade_transforms.go b/esapi/api.xpack.transform.upgrade_transforms.go index 0e89ce10a5..f3e1a005c9 100644 --- a/esapi/api.xpack.transform.upgrade_transforms.go +++ b/esapi/api.xpack.transform.upgrade_transforms.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.ack_watch.go b/esapi/api.xpack.watcher.ack_watch.go index 94b723f4c6..15e6cde81d 100644 --- a/esapi/api.xpack.watcher.ack_watch.go +++ b/esapi/api.xpack.watcher.ack_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.activate_watch.go b/esapi/api.xpack.watcher.activate_watch.go index e6cfa58c65..eae2a9a6a5 100644 --- a/esapi/api.xpack.watcher.activate_watch.go +++ b/esapi/api.xpack.watcher.activate_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.deactivate_watch.go b/esapi/api.xpack.watcher.deactivate_watch.go index 217992055d..497fa13b6e 100644 --- a/esapi/api.xpack.watcher.deactivate_watch.go +++ b/esapi/api.xpack.watcher.deactivate_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.delete_watch.go b/esapi/api.xpack.watcher.delete_watch.go index 9194f80d80..1cc889f991 100644 --- a/esapi/api.xpack.watcher.delete_watch.go +++ b/esapi/api.xpack.watcher.delete_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.execute_watch.go b/esapi/api.xpack.watcher.execute_watch.go index afddb02848..5c6277dc03 100644 --- a/esapi/api.xpack.watcher.execute_watch.go +++ b/esapi/api.xpack.watcher.execute_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.get_settings.go b/esapi/api.xpack.watcher.get_settings.go index 3a55b46d1e..934d875af7 100644 --- a/esapi/api.xpack.watcher.get_settings.go +++ b/esapi/api.xpack.watcher.get_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newWatcherGetSettingsFunc(t Transport) WatcherGetSettings { @@ -49,6 +50,8 @@ type WatcherGetSettings func(o ...func(*WatcherGetSettingsRequest)) (*Response, // WatcherGetSettingsRequest configures the Watcher Get Settings API request. type WatcherGetSettingsRequest struct { + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -86,6 +89,10 @@ func (r WatcherGetSettingsRequest) Do(providedCtx context.Context, transport Tra params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -164,6 +171,13 @@ func (f WatcherGetSettings) WithContext(v context.Context) func(*WatcherGetSetti } } +// WithMasterTimeout - specify timeout for connection to master. +func (f WatcherGetSettings) WithMasterTimeout(v time.Duration) func(*WatcherGetSettingsRequest) { + return func(r *WatcherGetSettingsRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f WatcherGetSettings) WithPretty() func(*WatcherGetSettingsRequest) { return func(r *WatcherGetSettingsRequest) { diff --git a/esapi/api.xpack.watcher.get_watch.go b/esapi/api.xpack.watcher.get_watch.go index b831ac6d2d..c98c963e60 100644 --- a/esapi/api.xpack.watcher.get_watch.go +++ b/esapi/api.xpack.watcher.get_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.put_watch.go b/esapi/api.xpack.watcher.put_watch.go index b5af1bebae..c2089be98b 100644 --- a/esapi/api.xpack.watcher.put_watch.go +++ b/esapi/api.xpack.watcher.put_watch.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.query_watches.go b/esapi/api.xpack.watcher.query_watches.go index 18d069d092..f804c7d3d4 100644 --- a/esapi/api.xpack.watcher.query_watches.go +++ b/esapi/api.xpack.watcher.query_watches.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.start.go b/esapi/api.xpack.watcher.start.go index 8bc9025e6c..ea82cffdc7 100644 --- a/esapi/api.xpack.watcher.start.go +++ b/esapi/api.xpack.watcher.start.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newWatcherStartFunc(t Transport) WatcherStart { @@ -49,6 +50,8 @@ type WatcherStart func(o ...func(*WatcherStartRequest)) (*Response, error) // WatcherStartRequest configures the Watcher Start API request. type WatcherStartRequest struct { + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -86,6 +89,10 @@ func (r WatcherStartRequest) Do(providedCtx context.Context, transport Transport params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -164,6 +171,13 @@ func (f WatcherStart) WithContext(v context.Context) func(*WatcherStartRequest) } } +// WithMasterTimeout - specify timeout for connection to master. +func (f WatcherStart) WithMasterTimeout(v time.Duration) func(*WatcherStartRequest) { + return func(r *WatcherStartRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f WatcherStart) WithPretty() func(*WatcherStartRequest) { return func(r *WatcherStartRequest) { diff --git a/esapi/api.xpack.watcher.stats.go b/esapi/api.xpack.watcher.stats.go index 86d60e2b83..99534c64c4 100644 --- a/esapi/api.xpack.watcher.stats.go +++ b/esapi/api.xpack.watcher.stats.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.watcher.stop.go b/esapi/api.xpack.watcher.stop.go index 8db72d3e27..caa1dc74ff 100644 --- a/esapi/api.xpack.watcher.stop.go +++ b/esapi/api.xpack.watcher.stop.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -23,6 +23,7 @@ import ( "context" "net/http" "strings" + "time" ) func newWatcherStopFunc(t Transport) WatcherStop { @@ -49,6 +50,8 @@ type WatcherStop func(o ...func(*WatcherStopRequest)) (*Response, error) // WatcherStopRequest configures the Watcher Stop API request. 
type WatcherStopRequest struct { + MasterTimeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -86,6 +89,10 @@ func (r WatcherStopRequest) Do(providedCtx context.Context, transport Transport) params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -164,6 +171,13 @@ func (f WatcherStop) WithContext(v context.Context) func(*WatcherStopRequest) { } } +// WithMasterTimeout - specify timeout for connection to master. +func (f WatcherStop) WithMasterTimeout(v time.Duration) func(*WatcherStopRequest) { + return func(r *WatcherStopRequest) { + r.MasterTimeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f WatcherStop) WithPretty() func(*WatcherStopRequest) { return func(r *WatcherStopRequest) { diff --git a/esapi/api.xpack.watcher.update_settings.go b/esapi/api.xpack.watcher.update_settings.go index 824473b910..c8a2e5fb21 100644 --- a/esapi/api.xpack.watcher.update_settings.go +++ b/esapi/api.xpack.watcher.update_settings.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi @@ -24,6 +24,7 @@ import ( "io" "net/http" "strings" + "time" ) func newWatcherUpdateSettingsFunc(t Transport) WatcherUpdateSettings { @@ -52,6 +53,9 @@ type WatcherUpdateSettings func(body io.Reader, o ...func(*WatcherUpdateSettings type WatcherUpdateSettingsRequest struct { Body io.Reader + MasterTimeout time.Duration + Timeout time.Duration + Pretty bool Human bool ErrorTrace bool @@ -89,6 +93,14 @@ func (r WatcherUpdateSettingsRequest) Do(providedCtx context.Context, transport params = make(map[string]string) + if r.MasterTimeout != 0 { + params["master_timeout"] = formatDuration(r.MasterTimeout) + } + + if r.Timeout != 0 { + params["timeout"] = formatDuration(r.Timeout) + } + if r.Pretty { params["pretty"] = "true" } @@ -174,6 +186,20 @@ func (f WatcherUpdateSettings) WithContext(v context.Context) func(*WatcherUpdat } } +// WithMasterTimeout - specify timeout for connection to master. +func (f WatcherUpdateSettings) WithMasterTimeout(v time.Duration) func(*WatcherUpdateSettingsRequest) { + return func(r *WatcherUpdateSettingsRequest) { + r.MasterTimeout = v + } +} + +// WithTimeout - specify timeout for waiting for acknowledgement from all nodes. +func (f WatcherUpdateSettings) WithTimeout(v time.Duration) func(*WatcherUpdateSettingsRequest) { + return func(r *WatcherUpdateSettingsRequest) { + r.Timeout = v + } +} + // WithPretty makes the response body pretty-printed. func (f WatcherUpdateSettings) WithPretty() func(*WatcherUpdateSettingsRequest) { return func(r *WatcherUpdateSettingsRequest) { diff --git a/esapi/api.xpack.xpack.info.go b/esapi/api.xpack.xpack.info.go index cdabb4b93a..4cb3a0e41f 100644 --- a/esapi/api.xpack.xpack.info.go +++ b/esapi/api.xpack.xpack.info.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
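The Watcher get settings, start, stop, and update settings hunks add a master_timeout parameter, and update settings also gains timeout. A sketch of updating the Watcher index settings with both timeouts set; the settings body and the duration values are illustrative assumptions, not taken from this change:

package main

import (
	"context"
	"log"
	"strings"
	"time"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/esapi"
)

func main() {
	es, err := elasticsearch.NewDefaultClient()
	if err != nil {
		log.Fatalf("error creating the client: %s", err)
	}

	// PUT /_watcher/settings with master_timeout and timeout query parameters.
	req := esapi.WatcherUpdateSettingsRequest{
		Body:          strings.NewReader(`{"index.auto_expand_replicas": "0-4"}`), // illustrative settings body
		MasterTimeout: 30 * time.Second,
		Timeout:       30 * time.Second,
	}

	res, err := req.Do(context.Background(), es)
	if err != nil {
		log.Fatalf("error updating Watcher settings: %s", err)
	}
	defer res.Body.Close()
	log.Println(res.Status())
}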
// -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/esapi/api.xpack.xpack.usage.go b/esapi/api.xpack.xpack.usage.go index 5f39648cb8..6519295e40 100644 --- a/esapi/api.xpack.xpack.usage.go +++ b/esapi/api.xpack.xpack.usage.go @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. // -// Code generated from specification version 8.14.0: DO NOT EDIT +// Code generated from specification version 8.15.0: DO NOT EDIT package esapi diff --git a/internal/build/cmd/generate/commands/gentests/skips.go b/internal/build/cmd/generate/commands/gentests/skips.go index 8e5c16f43f..6766e349e7 100644 --- a/internal/build/cmd/generate/commands/gentests/skips.go +++ b/internal/build/cmd/generate/commands/gentests/skips.go @@ -39,6 +39,7 @@ var skipFiles = []string{ "ml/jobs_get_result_buckets.yml", // Passes string value to int variable "ml/jobs_get_result_categories.yml", // --||-- "ml/set_upgrade_mode.yml", // --||-- + "ml/sparse_vector_search.yml", "ml/evaluate_data_frame.yml", // Floats as map keys @@ -48,20 +49,24 @@ var skipFiles = []string{ "search.highlight/20_fvh.yml", // bad backslash - "indices.stats/50_disk_usage.yml", // Needs a replacement mechanism implementation - "indices.stats/60_field_usage.yml", // Needs a replacement mechanism implementation + "indices.stats/50_disk_usage.yml", // Needs a replacement mechanism implementation + "indices.stats/60_field_usage.yml", // Needs a replacement mechanism implementation + "indices.stats/100_search_idle.yml", // incompatible maps of array "eql/10_basic.yml", "field_caps/50_fieldtype_filter.yml", // Incompatible test, need handling for double escaping keys with dots "aggregations/variable_width_histogram.yml", + "aggregations/percentiles_bucket.yml", // incompatible maps + "aggregations/scripted_metric.yml", // incompatible maps "cluster.desired_nodes/10_basic.yml", // incompatible $ stash replacement + "cluster.put_settings/10_basic.yml", // incompatible with testing stack "api_key/12_grant.yml", // incompatible $ stash replacement, need bearer token integration - "aggregations/percentiles_bucket.yml", // incompatible maps "user_profile/10_basic.yml", - "indices.stats/100_search_idle.yml", // incompatible maps of array - "ml/3rd_party_deployment.yml", // incompatible ml tests - "dlm/10_usage.yml", // incompatible float expansion + "ml/3rd_party_deployment.yml", // incompatible ml tests + "dlm/10_usage.yml", // incompatible float expansion "api_key/60_admin_user.yml", ".*esql\\/.*.yml", + "deprecation/10_basic.yml", // incompatible test generation + "search/520_fetch_fields.yml", // disabled for inconsistency } // TODO: Comments into descriptions for `Skip()` @@ -89,6 +94,10 @@ update/60_refresh.yml: update/61_refresh_with_types.yml: - When refresh url parameter is an empty string that means "refresh immediately" +# expected [status] to be green +cluster.health/10_basic.yml: + - cluster health with closed index (pre 7.2.0) + # catch: bad_request, Expected [status] to not be nil indices.data_stream/10_basic.yml: - Create data stream with invalid name @@ -133,6 +142,12 @@ cat.aliases/20_headers.yml: aggregations/range.yml: - Date range - Min and max long range bounds + - Float range + - Double range + +range/20_synthetic_source.yml: + - Float range + - Double range # Mismatch in number parsing, 8623000 != 8.623e+06 aggregations/geo_distance.yml: @@ -236,6 +251,7 @@ api_key/10_basic.yml: - Test 
invalidate api keys api_key/11_invalidation.yml: - Test invalidate api key by username + - Test invalidate api key by realm name api_key/21_query_with_aggs.yml: - Test composite aggs api key rollup/put_job.yml: @@ -411,6 +427,8 @@ data_stream/80_resolve_index_data_streams.yml: # Zero matchers like '...shards.0.stores.0.allocation:primary' expect array, not map data_stream/40_supported_apis.yml: - Verify shard stores api +aggregations/empty_field_metric.yml: + - Basic test # Failing with error 'Index [.security] is not on the current version. Security features relying on the index will not be available until the upgrade API is run on the index' data_stream/40_supported_apis.yml: @@ -485,13 +503,21 @@ user_profile/40_has_privileges.yml: # Bad type matching aggregate-metrics/100_synthetic_source.yml: - constant_keyword + - aggregate_metric_double + - aggregate_metric_double with ignore_malformed + analytics/histogram.yml: - histogram with synthetic source + - histogram with synthetic source and ignore_malformed # incompatible storage searchable_snapshots/20_synthetic_source.yml: - Tests searchable snapshots usage stats +ml/learning_to_rank_rescorer.yml: + - Test rescore with stored model and smaller window_size + - Test rescore with stored model and chained rescorers + # incompatible float format aggregations/max_metric.yml: - Merging results with unmapped fields @@ -500,6 +526,8 @@ aggregations/max_metric.yml: get/100_synthetic_source.yml: - indexed dense vectors - non-indexed dense vectors + - fields with ignore_malformed + - flattened field with ignore_above indices.stats/70_write_load.yml: - Write load average is tracked at shard level @@ -527,5 +555,17 @@ esql/40_unsupported_types.yml: esql/50_index_patterns.yml: - disjoint_mappings +# incompatible dot notation +logsdb/10_settings.yml: + - override sort order settings + - override sort missing settings + - override sort mode settings + +# expects map, got nil +search/520_fetch_fields.yml: + - fetch _ignored via stored_fields + - fetch _seq_no via stored_fields +spatial/140_synthetic_source.yml: + - point ` diff --git a/internal/testing/e2e/elasticsearch_integration_test.go b/internal/testing/e2e/elasticsearch_integration_test.go index db73d00246..e520a5d9ef 100644 --- a/internal/testing/e2e/elasticsearch_integration_test.go +++ b/internal/testing/e2e/elasticsearch_integration_test.go @@ -384,8 +384,8 @@ func TestElasticsearchIntegration(t *testing.T) { if name == "total_prices" { switch aggregation := agg.(type) { case *types.SumAggregate: - if aggregation.Value != 26. { - t.Fatalf("error in aggregation, should be 26, got: %f", aggregation.Value) + if *aggregation.Value != 26. { + t.Fatalf("error in aggregation, should be 26, got: %f", *aggregation.Value) } default: fmt.Printf("unexpected aggregation: %#v\n", agg) diff --git a/internal/version/version.go b/internal/version/version.go index 9084346353..d17c5333b8 100644 --- a/internal/version/version.go +++ b/internal/version/version.go @@ -18,4 +18,4 @@ package version // Client returns the client version as a string. -const Client = "8.14.0-SNAPSHOT" +const Client = "8.15.0-SNAPSHOT" diff --git a/typedapi/api._.go b/typedapi/api._.go index 2dcd40e3d6..9f6cc1f875 100644 --- a/typedapi/api._.go +++ b/typedapi/api._.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
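The integration-test fix above dereferences SumAggregate.Value, which suggests the typed API now models that value as a pointer. A hedged sketch of the nil guard a caller might add before dereferencing; the helper name and the zero-value aggregate in main are purely illustrative:

package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// describeSum reports a sum aggregate's value, guarding against a nil
// pointer before dereferencing it.
func describeSum(agg any) string {
	sum, ok := agg.(*types.SumAggregate)
	if !ok {
		return "not a sum aggregate"
	}
	if sum.Value == nil {
		return "sum aggregate with no value"
	}
	return fmt.Sprintf("sum = %f", *sum.Value)
}

func main() {
	// A zero-value aggregate has a nil Value, so the guard path is taken.
	fmt.Println(describeSum(&types.SumAggregate{}))
}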
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package typedapi @@ -30,6 +30,7 @@ import ( autoscaling_get_autoscaling_capacity "github.com/elastic/go-elasticsearch/v8/typedapi/autoscaling/getautoscalingcapacity" autoscaling_get_autoscaling_policy "github.com/elastic/go-elasticsearch/v8/typedapi/autoscaling/getautoscalingpolicy" autoscaling_put_autoscaling_policy "github.com/elastic/go-elasticsearch/v8/typedapi/autoscaling/putautoscalingpolicy" + capabilities "github.com/elastic/go-elasticsearch/v8/typedapi/capabilities" cat_aliases "github.com/elastic/go-elasticsearch/v8/typedapi/cat/aliases" cat_allocation "github.com/elastic/go-elasticsearch/v8/typedapi/cat/allocation" cat_component_templates "github.com/elastic/go-elasticsearch/v8/typedapi/cat/componenttemplates" @@ -141,6 +142,7 @@ import ( eql_get "github.com/elastic/go-elasticsearch/v8/typedapi/eql/get" eql_get_status "github.com/elastic/go-elasticsearch/v8/typedapi/eql/getstatus" eql_search "github.com/elastic/go-elasticsearch/v8/typedapi/eql/search" + esql_async_query "github.com/elastic/go-elasticsearch/v8/typedapi/esql/asyncquery" esql_query "github.com/elastic/go-elasticsearch/v8/typedapi/esql/query" features_get_features "github.com/elastic/go-elasticsearch/v8/typedapi/features/getfeatures" features_reset_features "github.com/elastic/go-elasticsearch/v8/typedapi/features/resetfeatures" @@ -219,10 +221,10 @@ import ( indices_unfreeze "github.com/elastic/go-elasticsearch/v8/typedapi/indices/unfreeze" indices_update_aliases "github.com/elastic/go-elasticsearch/v8/typedapi/indices/updatealiases" indices_validate_query "github.com/elastic/go-elasticsearch/v8/typedapi/indices/validatequery" - inference_delete_model "github.com/elastic/go-elasticsearch/v8/typedapi/inference/deletemodel" - inference_get_model "github.com/elastic/go-elasticsearch/v8/typedapi/inference/getmodel" + inference_delete "github.com/elastic/go-elasticsearch/v8/typedapi/inference/delete" + inference_get "github.com/elastic/go-elasticsearch/v8/typedapi/inference/get" inference_inference "github.com/elastic/go-elasticsearch/v8/typedapi/inference/inference" - inference_put_model "github.com/elastic/go-elasticsearch/v8/typedapi/inference/putmodel" + inference_put "github.com/elastic/go-elasticsearch/v8/typedapi/inference/put" ingest_delete_pipeline "github.com/elastic/go-elasticsearch/v8/typedapi/ingest/deletepipeline" ingest_geo_ip_stats "github.com/elastic/go-elasticsearch/v8/typedapi/ingest/geoipstats" ingest_get_pipeline "github.com/elastic/go-elasticsearch/v8/typedapi/ingest/getpipeline" @@ -311,6 +313,7 @@ import ( ml_update_filter "github.com/elastic/go-elasticsearch/v8/typedapi/ml/updatefilter" ml_update_job "github.com/elastic/go-elasticsearch/v8/typedapi/ml/updatejob" ml_update_model_snapshot "github.com/elastic/go-elasticsearch/v8/typedapi/ml/updatemodelsnapshot" + ml_update_trained_model_deployment "github.com/elastic/go-elasticsearch/v8/typedapi/ml/updatetrainedmodeldeployment" ml_upgrade_job_snapshot "github.com/elastic/go-elasticsearch/v8/typedapi/ml/upgradejobsnapshot" ml_validate "github.com/elastic/go-elasticsearch/v8/typedapi/ml/validate" ml_validate_detector "github.com/elastic/go-elasticsearch/v8/typedapi/ml/validatedetector" @@ -322,10 +325,17 @@ import ( nodes_reload_secure_settings "github.com/elastic/go-elasticsearch/v8/typedapi/nodes/reloadsecuresettings" nodes_stats 
"github.com/elastic/go-elasticsearch/v8/typedapi/nodes/stats" nodes_usage "github.com/elastic/go-elasticsearch/v8/typedapi/nodes/usage" - query_ruleset_delete "github.com/elastic/go-elasticsearch/v8/typedapi/queryruleset/delete" - query_ruleset_get "github.com/elastic/go-elasticsearch/v8/typedapi/queryruleset/get" - query_ruleset_list "github.com/elastic/go-elasticsearch/v8/typedapi/queryruleset/list" - query_ruleset_put "github.com/elastic/go-elasticsearch/v8/typedapi/queryruleset/put" + profiling_flamegraph "github.com/elastic/go-elasticsearch/v8/typedapi/profiling/flamegraph" + profiling_stacktraces "github.com/elastic/go-elasticsearch/v8/typedapi/profiling/stacktraces" + profiling_status "github.com/elastic/go-elasticsearch/v8/typedapi/profiling/status" + profiling_topn_functions "github.com/elastic/go-elasticsearch/v8/typedapi/profiling/topnfunctions" + query_rules_delete_rule "github.com/elastic/go-elasticsearch/v8/typedapi/queryrules/deleterule" + query_rules_delete_ruleset "github.com/elastic/go-elasticsearch/v8/typedapi/queryrules/deleteruleset" + query_rules_get_rule "github.com/elastic/go-elasticsearch/v8/typedapi/queryrules/getrule" + query_rules_get_ruleset "github.com/elastic/go-elasticsearch/v8/typedapi/queryrules/getruleset" + query_rules_list_rulesets "github.com/elastic/go-elasticsearch/v8/typedapi/queryrules/listrulesets" + query_rules_put_rule "github.com/elastic/go-elasticsearch/v8/typedapi/queryrules/putrule" + query_rules_put_ruleset "github.com/elastic/go-elasticsearch/v8/typedapi/queryrules/putruleset" rollup_delete_job "github.com/elastic/go-elasticsearch/v8/typedapi/rollup/deletejob" rollup_get_jobs "github.com/elastic/go-elasticsearch/v8/typedapi/rollup/getjobs" rollup_get_rollup_caps "github.com/elastic/go-elasticsearch/v8/typedapi/rollup/getrollupcaps" @@ -394,6 +404,7 @@ import ( security_put_role_mapping "github.com/elastic/go-elasticsearch/v8/typedapi/security/putrolemapping" security_put_user "github.com/elastic/go-elasticsearch/v8/typedapi/security/putuser" security_query_api_keys "github.com/elastic/go-elasticsearch/v8/typedapi/security/queryapikeys" + security_query_user "github.com/elastic/go-elasticsearch/v8/typedapi/security/queryuser" security_saml_authenticate "github.com/elastic/go-elasticsearch/v8/typedapi/security/samlauthenticate" security_saml_complete_logout "github.com/elastic/go-elasticsearch/v8/typedapi/security/samlcompletelogout" security_saml_invalidate "github.com/elastic/go-elasticsearch/v8/typedapi/security/samlinvalidate" @@ -444,9 +455,12 @@ import ( tasks_cancel "github.com/elastic/go-elasticsearch/v8/typedapi/tasks/cancel" tasks_get "github.com/elastic/go-elasticsearch/v8/typedapi/tasks/get" tasks_list "github.com/elastic/go-elasticsearch/v8/typedapi/tasks/list" + text_structure_find_field_structure "github.com/elastic/go-elasticsearch/v8/typedapi/textstructure/findfieldstructure" + text_structure_find_message_structure "github.com/elastic/go-elasticsearch/v8/typedapi/textstructure/findmessagestructure" text_structure_find_structure "github.com/elastic/go-elasticsearch/v8/typedapi/textstructure/findstructure" text_structure_test_grok_pattern "github.com/elastic/go-elasticsearch/v8/typedapi/textstructure/testgrokpattern" transform_delete_transform "github.com/elastic/go-elasticsearch/v8/typedapi/transform/deletetransform" + transform_get_node_stats "github.com/elastic/go-elasticsearch/v8/typedapi/transform/getnodestats" transform_get_transform "github.com/elastic/go-elasticsearch/v8/typedapi/transform/gettransform" 
transform_get_transform_stats "github.com/elastic/go-elasticsearch/v8/typedapi/transform/gettransformstats" transform_preview_transform "github.com/elastic/go-elasticsearch/v8/typedapi/transform/previewtransform" @@ -475,19 +489,40 @@ import ( ) type AsyncSearch struct { - // Deletes an async search by ID. If the search is still running, the search - // request will be cancelled. Otherwise, the saved search results are deleted. + // Deletes an async search by identifier. + // If the search is still running, the search request will be cancelled. + // Otherwise, the saved search results are deleted. + // If the Elasticsearch security features are enabled, the deletion of a + // specific async search is restricted to: the authenticated user that submitted + // the original search request; users that have the `cancel_task` cluster + // privilege. // https://www.elastic.co/guide/en/elasticsearch/reference/current/async-search.html Delete async_search_delete.NewDelete // Retrieves the results of a previously submitted async search request given - // its ID. + // its identifier. + // If the Elasticsearch security features are enabled, access to the results of + // a specific async search is restricted to the user or API key that submitted + // it. // https://www.elastic.co/guide/en/elasticsearch/reference/current/async-search.html Get async_search_get.NewGet + // Get async search status // Retrieves the status of a previously submitted async search request given its - // ID. + // identifier, without retrieving search results. + // If the Elasticsearch security features are enabled, use of this API is + // restricted to the `monitoring_user` role. // https://www.elastic.co/guide/en/elasticsearch/reference/current/async-search.html Status async_search_status.NewStatus - // Executes a search request asynchronously. + // Runs a search request asynchronously. + // When the primary sort of the results is an indexed field, shards get sorted + // based on minimum and maximum value that they hold for that field, hence + // partial results become available following the sort criteria that was + // requested. + // Warning: Async search does not support scroll nor search requests that only + // include the suggest section. + // By default, Elasticsearch doesn’t allow you to store an async search response + // larger than 10Mb and an attempt to do this results in an error. + // The maximum allowed size for a stored async search response can be set by + // changing the `search.max_async_search_response_size` cluster level setting. // https://www.elastic.co/guide/en/elasticsearch/reference/current/async-search.html Submit async_search_submit.NewSubmit } @@ -512,91 +547,224 @@ type Autoscaling struct { PutAutoscalingPolicy autoscaling_put_autoscaling_policy.NewPutAutoscalingPolicy } +type Capabilities struct { + // Checks if the specified combination of method, API, parameters, and arbitrary + // capabilities are supported + // https://www.elastic.co/guide/en/elasticsearch/reference/current/capabilities.html + Capabilities capabilities.NewCapabilities +} + type Cat struct { - // Shows information about currently configured aliases to indices including - // filter and routing infos. + // Retrieves the cluster’s index aliases, including filter and routing + // information. + // The API does not return data stream aliases. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or the Kibana console. They are not intended for use by applications. 
+ // For application consumption, use the aliases API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-alias.html Aliases cat_aliases.NewAliases - // Provides a snapshot of how many shards are allocated to each data node and - // how much disk space they are using. + // Provides a snapshot of the number of shards allocated to each data node and + // their disk space. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-allocation.html Allocation cat_allocation.NewAllocation - // Returns information about existing component_templates templates. + // Returns information about component templates in a cluster. + // Component templates are building blocks for constructing index templates that + // specify index mappings, settings, and aliases. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. + // They are not intended for use by applications. For application consumption, + // use the get component template API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-component-templates.html ComponentTemplates cat_component_templates.NewComponentTemplates - // Provides quick access to the document count of the entire cluster, or - // individual indices. + // Provides quick access to a document count for a data stream, an index, or an + // entire cluster. + // NOTE: The document count only includes live documents, not deleted documents + // which have not yet been removed by the merge process. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. + // They are not intended for use by applications. For application consumption, + // use the count API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-count.html Count cat_count.NewCount - // Shows how much heap memory is currently being used by fielddata on every data - // node in the cluster. + // Returns the amount of heap memory currently used by the field data cache on + // every data node in the cluster. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. + // They are not intended for use by applications. For application consumption, + // use the nodes stats API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-fielddata.html Fielddata cat_fielddata.NewFielddata - // Returns a concise representation of the cluster health. + // Returns the health status of a cluster, similar to the cluster health API. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. + // They are not intended for use by applications. For application consumption, + // use the cluster health API. + // This API is often used to check malfunctioning clusters. + // To help you track cluster health alongside log files and alerting systems, + // the API returns timestamps in two formats: + // `HH:MM:SS`, which is human-readable but includes no date information; + // `Unix epoch time`, which is machine-sortable and includes date information. + // The latter format is useful for cluster recoveries that take multiple days. + // You can use the cat health API to verify cluster health across multiple + // nodes. 
+ // You also can use the API to track the recovery of a large cluster over a + // longer period of time. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-health.html Health cat_health.NewHealth // Returns help for the Cat APIs. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat.html Help cat_help.NewHelp - // Returns information about indices: number of primaries and replicas, document - // counts, disk size, ... + // Returns high-level information about indices in a cluster, including backing + // indices for data streams. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. + // They are not intended for use by applications. For application consumption, + // use the get index API. + // Use the cat indices API to get the following information for each index in a + // cluster: shard count; document count; deleted document count; primary store + // size; total store size of all shards, including shard replicas. + // These metrics are retrieved directly from Lucene, which Elasticsearch uses + // internally to power indexing and search. As a result, all document counts + // include hidden nested documents. + // To get an accurate count of Elasticsearch documents, use the cat count or + // count APIs. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-indices.html Indices cat_indices.NewIndices - // Returns information about the master node. + // Returns information about the master node, including the ID, bound IP + // address, and name. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. For + // application consumption, use the nodes info API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-master.html Master cat_master.NewMaster - // Gets configuration and usage information about data frame analytics jobs. + // Returns configuration and usage information about data frame analytics jobs. + // + // IMPORTANT: cat APIs are only intended for human consumption using the Kibana + // console or command line. They are not intended for use by applications. For + // application consumption, use the get data frame analytics jobs statistics + // API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-dfanalytics.html MlDataFrameAnalytics cat_ml_data_frame_analytics.NewMlDataFrameAnalytics - // Gets configuration and usage information about datafeeds. + // Returns configuration and usage information about datafeeds. + // This API returns a maximum of 10,000 datafeeds. + // If the Elasticsearch security features are enabled, you must have + // `monitor_ml`, `monitor`, `manage_ml`, or `manage` + // cluster privileges to use this API. + // + // IMPORTANT: cat APIs are only intended for human consumption using the Kibana + // console or command line. They are not intended for use by applications. For + // application consumption, use the get datafeed statistics API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-datafeeds.html MlDatafeeds cat_ml_datafeeds.NewMlDatafeeds - // Gets configuration and usage information about anomaly detection jobs. + // Returns configuration and usage information for anomaly detection jobs. + // This API returns a maximum of 10,000 jobs. 
+ // If the Elasticsearch security features are enabled, you must have + // `monitor_ml`, + // `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. + // + // IMPORTANT: cat APIs are only intended for human consumption using the Kibana + // console or command line. They are not intended for use by applications. For + // application consumption, use the get anomaly detection job statistics API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-anomaly-detectors.html MlJobs cat_ml_jobs.NewMlJobs - // Gets configuration and usage information about inference trained models. + // Returns configuration and usage information about inference trained models. + // + // IMPORTANT: cat APIs are only intended for human consumption using the Kibana + // console or command line. They are not intended for use by applications. For + // application consumption, use the get trained models statistics API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-trained-model.html MlTrainedModels cat_ml_trained_models.NewMlTrainedModels // Returns information about custom node attributes. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. For + // application consumption, use the nodes info API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-nodeattrs.html Nodeattrs cat_nodeattrs.NewNodeattrs - // Returns basic statistics about performance of cluster nodes. + // Returns information about the nodes in a cluster. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. For + // application consumption, use the nodes info API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-nodes.html Nodes cat_nodes.NewNodes - // Returns a concise representation of the cluster pending tasks. + // Returns cluster-level changes that have not yet been executed. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. For + // application consumption, use the pending cluster tasks API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-pending-tasks.html PendingTasks cat_pending_tasks.NewPendingTasks - // Returns information about installed plugins across nodes node. + // Returns a list of plugins running on each node of a cluster. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. For + // application consumption, use the nodes info API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-plugins.html Plugins cat_plugins.NewPlugins - // Returns information about index shard recoveries, both on-going completed. + // Returns information about ongoing and completed shard recoveries. + // Shard recovery is the process of initializing a shard copy, such as restoring + // a primary shard from a snapshot or syncing a replica shard from a primary + // shard. When a shard recovery completes, the recovered shard is available for + // search and indexing. + // For data streams, the API returns information about the stream’s backing + // indices. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. 
They are not intended for use by applications. For + // application consumption, use the index recovery API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-recovery.html Recovery cat_recovery.NewRecovery - // Returns information about snapshot repositories registered in the cluster. + // Returns the snapshot repositories for a cluster. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. For + // application consumption, use the get snapshot repository API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-repositories.html Repositories cat_repositories.NewRepositories - // Provides low-level information about the segments in the shards of an index. + // Returns low-level information about the Lucene segments in index shards. + // For data streams, the API returns information about the backing indices. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. For + // application consumption, use the index segments API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-segments.html Segments cat_segments.NewSegments - // Provides a detailed view of shard allocation on nodes. + // Returns information about the shards in a cluster. + // For data streams, the API returns information about the backing indices. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-shards.html Shards cat_shards.NewShards - // Returns all snapshots in a specific repository. + // Returns information about the snapshots stored in one or more repositories. + // A snapshot is a backup of an index or running Elasticsearch cluster. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. For + // application consumption, use the get snapshot API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-snapshots.html Snapshots cat_snapshots.NewSnapshots - // Returns information about the tasks currently executing on one or more nodes - // in the cluster. + // Returns information about tasks currently executing in the cluster. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. For + // application consumption, use the task management API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html Tasks cat_tasks.NewTasks - // Returns information about existing templates. + // Returns information about index templates in a cluster. + // You can use index templates to apply index settings and field mappings to new + // indices at creation. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. For + // application consumption, use the get index template API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-templates.html Templates cat_templates.NewTemplates - // Returns cluster-wide thread pool statistics per node. - // By default the active, queue and rejected statistics are returned for all - // thread pools. 
+ // Returns thread pool statistics for each node in a cluster. + // Returned information includes all built-in thread pools and custom thread + // pools. + // IMPORTANT: cat APIs are only intended for human consumption using the command + // line or Kibana console. They are not intended for use by applications. For + // application consumption, use the nodes info API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-thread-pool.html ThreadPool cat_thread_pool.NewThreadPool - // Gets configuration and usage information about transforms. + // Returns configuration and usage information about transforms. + // + // IMPORTANT: cat APIs are only intended for human consumption using the Kibana + // console or command line. They are not intended for use by applications. For + // application consumption, use the get transform statistics API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-transforms.html Transforms cat_transforms.NewTransforms } @@ -655,7 +823,9 @@ type Cluster struct { // Provides explanations for shard allocations in the cluster. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-allocation-explain.html AllocationExplain cluster_allocation_explain.NewAllocationExplain - // Deletes a component template + // Deletes component templates. + // Component templates are building blocks for constructing index templates that + // specify index mappings, settings, and aliases. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html DeleteComponentTemplate cluster_delete_component_template.NewDeleteComponentTemplate // Clears cluster voting config exclusions. @@ -664,33 +834,73 @@ type Cluster struct { // Returns information about whether a particular component template exist // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html ExistsComponentTemplate cluster_exists_component_template.NewExistsComponentTemplate - // Returns one or more component templates + // Retrieves information about component templates. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html GetComponentTemplate cluster_get_component_template.NewGetComponentTemplate - // Returns cluster settings. + // Returns cluster-wide settings. + // By default, it returns only settings that have been explicitly defined. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-get-settings.html GetSettings cluster_get_settings.NewGetSettings - // Returns basic information about the health of the cluster. + // The cluster health API returns a simple status on the health of the cluster. + // You can also use the API to get the health status of only specified data + // streams and indices. For data streams, the API retrieves the health status of + // the stream’s backing indices. + // The cluster health status is: green, yellow or red. On the shard level, a red + // status indicates that the specific shard is not allocated in the cluster, + // yellow means that the primary shard is allocated but replicas are not, and + // green means that all shards are allocated. The index level status is + // controlled by the worst shard status. The cluster status is controlled by the + // worst index status. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-health.html Health cluster_health.NewHealth // Returns different information about the cluster. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-info.html Info cluster_info.NewInfo - // Returns a list of any cluster-level changes (e.g. create index, update - // mapping, - // allocate or fail shard) which have not yet been executed. + // Returns cluster-level changes (such as create index, update mapping, allocate + // or fail shard) that have not yet been executed. + // NOTE: This API returns a list of any pending updates to the cluster state. + // These are distinct from the tasks reported by the Task Management API which + // include periodic tasks and tasks initiated by the user, such as node stats, + // search queries, or create index requests. + // However, if a user-initiated task such as a create index command causes a + // cluster state update, the activity of this task might be reported by both + // task api and pending cluster tasks API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-pending.html PendingTasks cluster_pending_tasks.NewPendingTasks // Updates the cluster voting config exclusions by node ids or node names. // https://www.elastic.co/guide/en/elasticsearch/reference/current/voting-config-exclusions.html PostVotingConfigExclusions cluster_post_voting_config_exclusions.NewPostVotingConfigExclusions - // Creates or updates a component template + // Creates or updates a component template. + // Component templates are building blocks for constructing index templates that + // specify index mappings, settings, and aliases. + // + // An index template can be composed of multiple component templates. + // To use a component template, specify it in an index template’s `composed_of` + // list. + // Component templates are only applied to new data streams and indices as part + // of a matching index template. + // + // Settings and mappings specified directly in the index template or the create + // index request override any settings or mappings specified in a component + // template. + // + // Component templates are only used during index creation. + // For data streams, this includes data stream creation and the creation of a + // stream’s backing indices. + // Changes to component templates do not affect existing indices, including a + // stream’s backing indices. + // + // You can use C-style `/* *\/` block comments in component templates. + // You can include comments anywhere in the request body except before the + // opening curly bracket. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html PutComponentTemplate cluster_put_component_template.NewPutComponentTemplate // Updates the cluster settings. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-update-settings.html PutSettings cluster_put_settings.NewPutSettings - // Returns the information about configured remote clusters. + // The cluster remote info API allows you to retrieve all of the configured + // remote cluster information. It returns connection and endpoint information + // keyed by the configured remote cluster alias. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-remote-info.html RemoteInfo cluster_remote_info.NewRemoteInfo // Allows to manually change the allocation of individual shards in the cluster. @@ -699,61 +909,69 @@ type Cluster struct { // Returns a comprehensive information about the state of the cluster. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-state.html State cluster_state.NewState - // Returns high-level overview of cluster statistics. + // Returns cluster statistics. + // It returns basic index metrics (shard numbers, store size, memory usage) and + // information about the current nodes that form the cluster (number, roles, os, + // jvm versions, memory usage, cpu and installed plugins). // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-stats.html Stats cluster_stats.NewStats } type Core struct { - // Allows to perform multiple index/update/delete operations in a single - // request. + // Performs multiple indexing or delete operations in a single API call. + // This reduces overhead and can greatly increase indexing speed. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html Bulk core_bulk.NewBulk - // Explicitly clears the search context for a scroll. + // Clears the search context and results for a scrolling search. // https://www.elastic.co/guide/en/elasticsearch/reference/current/clear-scroll-api.html ClearScroll core_clear_scroll.NewClearScroll - // Close a point in time + // Closes a point-in-time. // https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html ClosePointInTime core_close_point_in_time.NewClosePointInTime // Returns number of documents matching a query. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-count.html Count core_count.NewCount - // Creates a new document in the index. - // - // Returns a 409 response when a document with a same ID already exists in the - // index. + // Adds a JSON document to the specified data stream or index and makes it + // searchable. + // If the target is an index and the document already exists, the request + // updates the document and increments its version. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html Create core_create.NewCreate - // Removes a document from the index. + // Removes a JSON document from the specified index. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete.html Delete core_delete.NewDelete - // Deletes documents matching the provided query. + // Deletes documents that match the specified query. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html DeleteByQuery core_delete_by_query.NewDeleteByQuery // Changes the number of requests per second for a particular Delete By Query // operation. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html DeleteByQueryRethrottle core_delete_by_query_rethrottle.NewDeleteByQueryRethrottle - // Deletes a script. + // Deletes a stored script or search template. // https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html DeleteScript core_delete_script.NewDeleteScript - // Returns information about whether a document exists in an index. + // Checks if a document in an index exists. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-get.html Exists core_exists.NewExists - // Returns information about whether a document source exists in an index. + // Checks if a document's `_source` is stored. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-get.html ExistsSource core_exists_source.NewExistsSource - // Returns information about why a specific matches (or doesn't match) a query. 
+ // Returns information about why a specific document matches (or doesn’t match) + // a query. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-explain.html Explain core_explain.NewExplain - // Returns the information about the capabilities of fields among multiple - // indices. + // The field capabilities API returns the information about the capabilities of + // fields among multiple indices. + // The field capabilities API returns runtime fields like any other field. For + // example, a runtime field with a type + // of keyword is returned as any other field that belongs to the `keyword` + // family. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-field-caps.html FieldCaps core_field_caps.NewFieldCaps // Returns a document. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-get.html Get core_get.NewGet - // Returns a script. + // Retrieves a stored script or search template. // https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html GetScript core_get_script.NewGetScript // Returns all script contexts. @@ -768,7 +986,10 @@ type Core struct { // Returns the health of the cluster. // https://www.elastic.co/guide/en/elasticsearch/reference/current/health-api.html HealthReport core_health_report.NewHealthReport - // Creates or updates a document in an index. + // Adds a JSON document to the specified data stream or index and makes it + // searchable. + // If the target is an index and the document already exists, the request + // updates the document and increments its version. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html Index core_index.NewIndex // Returns basic information about the cluster. @@ -783,23 +1004,33 @@ type Core struct { // Allows to execute several search operations in one request. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html Msearch core_msearch.NewMsearch - // Allows to execute several search template operations in one request. + // Runs multiple templated searches with a single request. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html MsearchTemplate core_msearch_template.NewMsearchTemplate // Returns multiple termvectors in one request. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-multi-termvectors.html Mtermvectors core_mtermvectors.NewMtermvectors - // Open a point in time that can be used in subsequent searches + // A search request by default executes against the most recent visible data of + // the target indices, + // which is called point in time. Elasticsearch pit (point in time) is a + // lightweight view into the + // state of the data as it existed when initiated. In some cases, it’s preferred + // to perform multiple + // search requests using the same point in time. For example, if refreshes + // happen between + // `search_after` requests, then the results of those requests might not be + // consistent as changes happening + // between searches are only visible to the more recent point in time. // https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html OpenPointInTime core_open_point_in_time.NewOpenPointInTime // Returns whether the cluster is running. // https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html Ping core_ping.NewPing - // Creates or updates a script. + // Creates or updates a stored script or search template. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html PutScript core_put_script.NewPutScript - // Allows to evaluate the quality of ranked search results over a set of typical - // search queries + // Enables you to evaluate the quality of ranked search results over a set of + // typical search queries. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-rank-eval.html RankEval core_rank_eval.NewRankEval // Allows to copy documents from one index to another, optionally filtering the @@ -809,19 +1040,22 @@ type Core struct { // documents from a remote cluster. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html Reindex core_reindex.NewReindex - // Changes the number of requests per second for a particular Reindex operation. + // Copies documents from a source to a destination. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html ReindexRethrottle core_reindex_rethrottle.NewReindexRethrottle - // Allows to use the Mustache language to pre-render a search definition. + // Renders a search template as a search request body. // https://www.elastic.co/guide/en/elasticsearch/reference/current/render-search-template-api.html RenderSearchTemplate core_render_search_template.NewRenderSearchTemplate - // Allows an arbitrary script to be executed and a result to be returned + // Runs a script and returns a result. // https://www.elastic.co/guide/en/elasticsearch/painless/current/painless-execute-api.html ScriptsPainlessExecute core_scripts_painless_execute.NewScriptsPainlessExecute // Allows to retrieve a large numbers of results from a single search request. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-body.html#request-body-search-scroll Scroll core_scroll.NewScroll - // Returns results matching a query. + // Returns search hits that match the query defined in the request. + // You can provide search queries using the `q` query string parameter or the + // request body. + // If both are specified, only the query parameter is used. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-search.html Search core_search.NewSearch // Searches a vector tile for geospatial values. Returns results as a binary @@ -832,7 +1066,7 @@ type Core struct { // be executed against. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-shards.html SearchShards core_search_shards.NewSearchShards - // Allows to use the Mustache language to pre-render a search definition. + // Runs a search with a search template. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html SearchTemplate core_search_template.NewSearchTemplate // The terms enum API can be used to discover terms in the index that begin @@ -847,10 +1081,10 @@ type Core struct { // Updates a document with a script or partial document. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update.html Update core_update.NewUpdate - // Updates documents that match the specified query. If no query is specified, - // performs an update on every document in the index without changing the - // source, - // for example to pick up a mapping change. + // Updates documents that match the specified query. + // If no query is specified, performs an update on every document in the data + // stream or index without modifying the source, which is useful for picking up + // mapping changes. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update-by-query.html UpdateByQuery core_update_by_query.NewUpdateByQuery // Changes the number of requests per second for a particular Update By Query @@ -878,29 +1112,29 @@ type Enrich struct { // Creates the enrich index for an existing enrich policy. // https://www.elastic.co/guide/en/elasticsearch/reference/current/execute-enrich-policy-api.html ExecutePolicy enrich_execute_policy.NewExecutePolicy - // Gets information about an enrich policy. + // Returns information about an enrich policy. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html GetPolicy enrich_get_policy.NewGetPolicy - // Creates a new enrich policy. + // Creates an enrich policy. // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-enrich-policy-api.html PutPolicy enrich_put_policy.NewPutPolicy - // Gets enrich coordinator statistics and information about enrich policies that - // are currently executing. + // Returns enrich coordinator statistics and information about enrich policies + // that are currently executing. // https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-stats-api.html Stats enrich_stats.NewStats } type Eql struct { - // Deletes an async EQL search by ID. If the search is still running, the search - // request will be cancelled. Otherwise, the saved search results are deleted. + // Deletes an async EQL search or a stored synchronous EQL search. + // The API also deletes results for the search. // https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search-api.html Delete eql_delete.NewDelete - // Returns async results from previously executed Event Query Language (EQL) - // search + // Returns the current status and available results for an async EQL search or a + // stored synchronous EQL search. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-async-eql-search-api.html Get eql_get.NewGet - // Returns the status of a previously submitted async or stored Event Query - // Language (EQL) search + // Returns the current status for an async EQL search or a stored synchronous + // EQL search without returning results. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-async-eql-status-api.html GetStatus eql_get_status.NewGetStatus // Returns results matching a query expressed in Event Query Language (EQL) @@ -909,7 +1143,10 @@ type Eql struct { } type Esql struct { - // Executes an ESQL request + // Executes an ESQL request asynchronously + // https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-async-query-api.html + AsyncQuery esql_async_query.NewAsyncQuery + // Executes an ES|QL request // https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-rest.html Query esql_query.NewQuery } @@ -929,52 +1166,64 @@ type Fleet struct { // internal use by the fleet server project. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-global-checkpoints.html GlobalCheckpoints fleet_global_checkpoints.NewGlobalCheckpoints - // Multi Search API where the search will only be executed after specified - // checkpoints are available due to a refresh. This API is designed for internal - // use by the fleet server project. + // Executes several [fleet + // searches](https://www.elastic.co/guide/en/elasticsearch/reference/current/fleet-search.html) + // with a single API request. 
+ // The API follows the same structure as the [multi + // search](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html) + // API. However, similar to the fleet search API, it + // supports the wait_for_checkpoints parameter. // Msearch fleet_msearch.NewMsearch // Creates a secret stored by Fleet. // PostSecret fleet_post_secret.NewPostSecret - // Search API where the search will only be executed after specified checkpoints - // are available due to a refresh. This API is designed for internal use by the - // fleet server project. + // The purpose of the fleet search api is to provide a search api where the + // search will only be executed + // after provided checkpoint has been processed and is visible for searches + // inside of Elasticsearch. // Search fleet_search.NewSearch } type Graph struct { - // Explore extracted and summarized information about the documents and terms in - // an index. + // Extracts and summarizes information about the documents and terms in an + // Elasticsearch data stream or index. // https://www.elastic.co/guide/en/elasticsearch/reference/current/graph-explore-api.html Explore graph_explore.NewExplore } type Ilm struct { - // Deletes the specified lifecycle policy definition. A currently used policy - // cannot be deleted. + // Deletes the specified lifecycle policy definition. You cannot delete policies + // that are currently in use. If the policy is being used to manage any indices, + // the request fails and returns an error. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-delete-lifecycle.html DeleteLifecycle ilm_delete_lifecycle.NewDeleteLifecycle - // Retrieves information about the index's current lifecycle state, such as the - // currently executing phase, action, and step. + // Retrieves information about the index’s current lifecycle state, such as the + // currently executing phase, action, and step. Shows when the index entered + // each one, the definition of the running phase, and information about any + // failures. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-explain-lifecycle.html ExplainLifecycle ilm_explain_lifecycle.NewExplainLifecycle - // Returns the specified policy definition. Includes the policy version and last - // modified date. + // Retrieves a lifecycle policy. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-get-lifecycle.html GetLifecycle ilm_get_lifecycle.NewGetLifecycle // Retrieves the current index lifecycle management (ILM) status. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-get-status.html GetStatus ilm_get_status.NewGetStatus - // Migrates the indices and ILM policies away from custom node attribute - // allocation routing to data tiers routing + // Switches the indices, ILM policies, and legacy, composable and component + // templates from using custom node attributes and + // attribute-based allocation filters to using data tiers, and optionally + // deletes one legacy index template. + // Using node roles enables ILM to automatically move the indices between data + // tiers. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-migrate-to-data-tiers.html MigrateToDataTiers ilm_migrate_to_data_tiers.NewMigrateToDataTiers // Manually moves an index into the specified step and executes that step.
// https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-move-to-step.html MoveToStep ilm_move_to_step.NewMoveToStep - // Creates a lifecycle policy + // Creates a lifecycle policy. If the specified policy exists, the policy is + // replaced and the policy version is incremented. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-put-lifecycle.html PutLifecycle ilm_put_lifecycle.NewPutLifecycle // Removes the assigned lifecycle policy and stops managing the specified index @@ -996,61 +1245,71 @@ type Indices struct { // Adds a block to an index. // https://www.elastic.co/guide/en/elasticsearch/reference/current/index-modules-blocks.html AddBlock indices_add_block.NewAddBlock - // Performs the analysis process on a text and return the tokens breakdown of - // the text. + // Performs analysis on a text string and returns the resulting tokens. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-analyze.html Analyze indices_analyze.NewAnalyze - // Clears all or specific caches for one or more indices. + // Clears the caches of one or more indices. + // For data streams, the API clears the caches of the stream’s backing indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-clearcache.html ClearCache indices_clear_cache.NewClearCache - // Clones an index + // Clones an existing index. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-clone-index.html Clone indices_clone.NewClone // Closes an index. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-close.html Close indices_close.NewClose - // Creates an index with optional settings and mappings. + // Creates a new index. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html Create indices_create.NewCreate - // Creates a data stream + // Creates a data stream. + // You must have a matching index template with data stream enabled. // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html CreateDataStream indices_create_data_stream.NewCreateDataStream - // Provides statistics on operations happening in a data stream. + // Retrieves statistics for one or more data streams. // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html DataStreamsStats indices_data_streams_stats.NewDataStreamsStats - // Deletes an index. + // Deletes one or more indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-delete-index.html Delete indices_delete.NewDelete - // Deletes an alias. + // Removes a data stream or index from an alias. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html DeleteAlias indices_delete_alias.NewDeleteAlias - // Deletes the data stream lifecycle of the selected data streams. + // Removes the data lifecycle from a data stream rendering it not managed by the + // data stream lifecycle // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams-delete-lifecycle.html DeleteDataLifecycle indices_delete_data_lifecycle.NewDeleteDataLifecycle - // Deletes a data stream. + // Deletes one or more data streams and their backing indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html DeleteDataStream indices_delete_data_stream.NewDeleteDataStream - // Deletes an index template. + // Delete an index template. + // The provided <index-template> may contain multiple template names separated + // by a comma.
If multiple template + // names are specified then there is no wildcard support and the provided names + // should match completely with + // existing templates. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-delete-template.html DeleteIndexTemplate indices_delete_index_template.NewDeleteIndexTemplate - // Deletes an index template. + // Deletes a legacy index template. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-delete-template-v1.html DeleteTemplate indices_delete_template.NewDeleteTemplate - // Analyzes the disk usage of each field of an index or data stream + // Analyzes the disk usage of each field of an index or data stream. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-disk-usage.html DiskUsage indices_disk_usage.NewDiskUsage - // Downsample an index + // Aggregates a time series (TSDS) index and stores pre-computed statistical + // summaries (`min`, `max`, `sum`, `value_count` and `avg`) for each metric + // field grouped by a configured time interval. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-downsample-data-stream.html Downsample indices_downsample.NewDownsample - // Returns information about whether a particular index exists. + // Checks if a data stream, index, or alias exists. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-exists.html Exists indices_exists.NewExists - // Returns information about whether a particular alias exists. + // Checks if an alias exists. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html ExistsAlias indices_exists_alias.NewExistsAlias // Returns information about whether a particular index template exists. // https://www.elastic.co/guide/en/elasticsearch/reference/current/index-templates.html ExistsIndexTemplate indices_exists_index_template.NewExistsIndexTemplate + // Check existence of index templates. // Returns information about whether a particular index template exists. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-template-exists-v1.html ExistsTemplate indices_exists_template.NewExistsTemplate @@ -1058,160 +1317,213 @@ type Indices struct { // as any potential encountered error, time since creation etc. // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams-explain-lifecycle.html ExplainDataLifecycle indices_explain_data_lifecycle.NewExplainDataLifecycle - // Returns the field usage stats for each field of an index + // Returns field usage information for each shard and field of an index. // https://www.elastic.co/guide/en/elasticsearch/reference/current/field-usage-stats.html FieldUsageStats indices_field_usage_stats.NewFieldUsageStats - // Performs the flush operation on one or more indices. + // Flushes one or more data streams or indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-flush.html Flush indices_flush.NewFlush // Performs the force merge operation on one or more indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-forcemerge.html Forcemerge indices_forcemerge.NewForcemerge - // Returns information about one or more indices. + // Returns information about one or more indices. For data streams, the API + // returns information about the + // stream’s backing indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html Get indices_get.NewGet - // Returns an alias. 
+ // Retrieves information for one or more aliases. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html GetAlias indices_get_alias.NewGetAlias - // Returns the data stream lifecycle of the selected data streams. + // Retrieves the data stream lifecycle configuration of one or more data + // streams. // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams-get-lifecycle.html GetDataLifecycle indices_get_data_lifecycle.NewGetDataLifecycle - // Returns data streams. + // Retrieves information about one or more data streams. // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html GetDataStream indices_get_data_stream.NewGetDataStream - // Returns mapping for one or more fields. + // Retrieves mapping definitions for one or more fields. + // For data streams, the API retrieves field mappings for the stream’s backing + // indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-field-mapping.html GetFieldMapping indices_get_field_mapping.NewGetFieldMapping - // Returns an index template. + // Get index templates. + // Returns information about one or more index templates. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template.html GetIndexTemplate indices_get_index_template.NewGetIndexTemplate - // Returns mappings for one or more indices. + // Retrieves mapping definitions for one or more indices. + // For data streams, the API retrieves mappings for the stream’s backing + // indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-mapping.html GetMapping indices_get_mapping.NewGetMapping - // Returns settings for one or more indices. + // Returns setting information for one or more indices. For data streams, + // returns setting information for the stream’s backing indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-settings.html GetSettings indices_get_settings.NewGetSettings - // Returns an index template. + // Get index templates. + // Retrieves information about one or more index templates. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template-v1.html GetTemplate indices_get_template.NewGetTemplate - // Migrates an alias to a data stream + // Converts an index alias to a data stream. + // You must have a matching index template that is data stream enabled. + // The alias must meet the following criteria: + // The alias must have a write index; + // All indices for the alias must have a `@timestamp` field mapping of a `date` + // or `date_nanos` field type; + // The alias must not have any filters; + // The alias must not use custom routing. + // If successful, the request removes the alias and creates a data stream with + // the same name. + // The indices for the alias become hidden backing indices for the stream. + // The write index for the alias becomes the write index for the stream. // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html MigrateToDataStream indices_migrate_to_data_stream.NewMigrateToDataStream - // Modifies a data stream + // Performs one or more data stream modification actions in a single atomic + // operation. // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html ModifyDataStream indices_modify_data_stream.NewModifyDataStream - // Opens an index. + // Opens a closed index. + // For data streams, the API opens any closed backing indices. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-open-close.html Open indices_open.NewOpen // Promotes a data stream from a replicated data stream managed by CCR to a // regular data stream // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html PromoteDataStream indices_promote_data_stream.NewPromoteDataStream - // Creates or updates an alias. + // Adds a data stream or index to an alias. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html PutAlias indices_put_alias.NewPutAlias - // Updates the data stream lifecycle of the selected data streams. + // Update the data lifecycle of the specified data streams. // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams-put-lifecycle.html PutDataLifecycle indices_put_data_lifecycle.NewPutDataLifecycle - // Creates or updates an index template. + // Create or update an index template. + // Index templates define settings, mappings, and aliases that can be applied + // automatically to new indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-put-template.html PutIndexTemplate indices_put_index_template.NewPutIndexTemplate - // Updates the index mappings. + // Adds new fields to an existing data stream or index. + // You can also use this API to change the search settings of existing fields. + // For data streams, these changes are applied to all backing indices by + // default. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-put-mapping.html PutMapping indices_put_mapping.NewPutMapping - // Updates the index settings. + // Changes a dynamic index setting in real time. For data streams, index setting + // changes are applied to all backing indices by default. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-update-settings.html PutSettings indices_put_settings.NewPutSettings - // Creates or updates an index template. + // Create or update an index template. + // Index templates define settings, mappings, and aliases that can be applied + // automatically to new indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates-v1.html PutTemplate indices_put_template.NewPutTemplate - // Returns information about ongoing index shard recoveries. + // Returns information about ongoing and completed shard recoveries for one or + // more indices. + // For data streams, the API returns information for the stream’s backing + // indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-recovery.html Recovery indices_recovery.NewRecovery - // Performs the refresh operation in one or more indices. + // A refresh makes recent operations performed on one or more indices available + // for search. + // For data streams, the API runs the refresh operation on the stream’s backing + // indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-refresh.html Refresh indices_refresh.NewRefresh // Reloads an index's search analyzers and their resources. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-reload-analyzers.html ReloadSearchAnalyzers indices_reload_search_analyzers.NewReloadSearchAnalyzers // Resolves the specified index expressions to return information about each - // cluster, including the local cluster, if included. + // cluster, including + // the local cluster, if included. + // Multiple patterns and remote clusters are supported. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-resolve-cluster-api.html ResolveCluster indices_resolve_cluster.NewResolveCluster - // Returns information about any matching indices, aliases, and data streams + // Resolves the specified name(s) and/or index patterns for indices, aliases, + // and data streams. + // Multiple patterns and remote clusters are supported. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-resolve-index-api.html ResolveIndex indices_resolve_index.NewResolveIndex - // Updates an alias to point to a new index when the existing index - // is considered to be too large or too old. + // Creates a new index for a data stream or index alias. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-rollover-index.html Rollover indices_rollover.NewRollover - // Provides low-level information about segments in a Lucene index. + // Returns low-level information about the Lucene segments in index shards. + // For data streams, the API returns information about the stream’s backing + // indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-segments.html Segments indices_segments.NewSegments - // Provides store information for shard copies of indices. + // Retrieves store information about replica shards in one or more indices. + // For data streams, the API retrieves store information for the stream’s + // backing indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-shards-stores.html ShardStores indices_shard_stores.NewShardStores - // Allow to shrink an existing index into a new index with fewer primary shards. + // Shrinks an existing index into a new index with fewer primary shards. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-shrink-index.html Shrink indices_shrink.NewShrink - // Simulate matching the given index name against the index templates in the - // system + // Simulate an index. + // Returns the index configuration that would be applied to the specified index + // from an existing index template. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-simulate-index.html SimulateIndexTemplate indices_simulate_index_template.NewSimulateIndexTemplate - // Simulate resolving the given template name or body + // Simulate an index template. + // Returns the index configuration that would be applied by a particular index + // template. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-simulate-template.html SimulateTemplate indices_simulate_template.NewSimulateTemplate - // Allows you to split an existing index into a new index with more primary - // shards. + // Splits an existing index into a new index with more primary shards. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-split-index.html Split indices_split.NewSplit - // Provides statistics on operations happening in an index. + // Returns statistics for one or more indices. + // For data streams, the API retrieves statistics for the stream’s backing + // indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-stats.html Stats indices_stats.NewStats - // Unfreezes an index. When a frozen index is unfrozen, the index goes through - // the normal recovery process and becomes writeable again. + // Unfreezes an index. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/unfreeze-index-api.html Unfreeze indices_unfreeze.NewUnfreeze - // Updates index aliases. + // Adds a data stream or index to an alias. // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html UpdateAliases indices_update_aliases.NewUpdateAliases - // Allows a user to validate a potentially expensive query without executing it. + // Validates a potentially expensive query without executing it. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-validate.html ValidateQuery indices_validate_query.NewValidateQuery } type Inference struct { - // Delete model in the Inference API + // Delete an inference endpoint // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-inference-api.html - DeleteModel inference_delete_model.NewDeleteModel - // Get a model in the Inference API + Delete inference_delete.NewDelete + // Get an inference endpoint // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-inference-api.html - GetModel inference_get_model.NewGetModel - // Perform inference on a model + Get inference_get.NewGet + // Perform inference on the service // https://www.elastic.co/guide/en/elasticsearch/reference/current/post-inference-api.html Inference inference_inference.NewInference - // Configure a model for use in the Inference API + // Create an inference endpoint // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-inference-api.html - PutModel inference_put_model.NewPutModel + Put inference_put.NewPut } type Ingest struct { - // Deletes a pipeline. + // Deletes one or more existing ingest pipelines. // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-pipeline-api.html DeletePipeline ingest_delete_pipeline.NewDeletePipeline - // Returns statistical information about geoip databases + // Gets download statistics for GeoIP2 databases used with the geoip processor. // https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html GeoIpStats ingest_geo_ip_stats.NewGeoIpStats - // Returns a pipeline. + // Returns information about one or more ingest pipelines. + // This API returns a local reference of the pipeline. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-pipeline-api.html GetPipeline ingest_get_pipeline.NewGetPipeline - // Returns a list of the built-in patterns. + // Extracts structured fields out of a single text field within a document. + // You choose which field to extract matched fields from, as well as the grok + // pattern you expect will match. + // A grok pattern is like a regular expression that supports aliased expressions + // that can be reused. // https://www.elastic.co/guide/en/elasticsearch/reference/current/grok-processor.html ProcessorGrok ingest_processor_grok.NewProcessorGrok - // Creates or updates a pipeline. + // Creates or updates an ingest pipeline. + // Changes made using this API take effect immediately. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest.html PutPipeline ingest_put_pipeline.NewPutPipeline - // Allows to simulate a pipeline with example documents. + // Executes an ingest pipeline against a set of provided documents.
// https://www.elastic.co/guide/en/elasticsearch/reference/current/simulate-pipeline-api.html Simulate ingest_simulate.NewSimulate } @@ -1220,7 +1532,10 @@ type License struct { // Deletes licensing information for the cluster // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-license.html Delete license_delete.NewDelete - // Retrieves licensing information for the cluster + // This API returns information about the type of license, when it was issued, + // and when it expires, for example. + // For more information about the different types of licenses, see + // https://www.elastic.co/subscriptions. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-license.html Get license_get.NewGet // Retrieves information about the status of the basic license. @@ -1232,22 +1547,29 @@ type License struct { // Updates the license for the cluster. // https://www.elastic.co/guide/en/elasticsearch/reference/current/update-license.html Post license_post.NewPost - // Starts an indefinite basic license. + // The start basic API enables you to initiate an indefinite basic license, + // which gives access to all the basic features. If the basic license does not + // support all of the features that are available with your current license, + // however, you are notified in the response. You must then re-submit the API + // request with the acknowledge parameter set to true. + // To check the status of your basic license, use the following API: [Get basic + // status](https://www.elastic.co/guide/en/elasticsearch/reference/current/get-basic-status.html). // https://www.elastic.co/guide/en/elasticsearch/reference/current/start-basic.html PostStartBasic license_post_start_basic.NewPostStartBasic - // starts a limited time trial license. + // The start trial API enables you to start a 30-day trial, which gives access + // to all subscription features. // https://www.elastic.co/guide/en/elasticsearch/reference/current/start-trial.html PostStartTrial license_post_start_trial.NewPostStartTrial } type Logstash struct { - // Deletes Logstash Pipelines used by Central Management + // Deletes a pipeline used for Logstash Central Management. // https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-api-delete-pipeline.html DeletePipeline logstash_delete_pipeline.NewDeletePipeline - // Retrieves Logstash Pipelines used by Central Management + // Retrieves pipelines used for Logstash Central Management. // https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-api-get-pipeline.html GetPipeline logstash_get_pipeline.NewGetPipeline - // Adds and updates Logstash Pipelines used for Central Management + // Creates or updates a pipeline used for Logstash Central Management. // https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-api-put-pipeline.html PutPipeline logstash_put_pipeline.NewPutPipeline } @@ -1267,14 +1589,34 @@ type Migration struct { } type Ml struct { - // Clear the cached results from a trained model deployment + // Clears a trained model deployment cache on all nodes where the trained model + // is assigned. + // A trained model deployment may have an inference cache enabled. + // As requests are handled by each allocated node, their responses may be cached + // on that individual node. + // Calling this API clears the caches without restarting the deployment. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/clear-trained-model-deployment-cache.html ClearTrainedModelDeploymentCache ml_clear_trained_model_deployment_cache.NewClearTrainedModelDeploymentCache - // Closes one or more anomaly detection jobs. A job can be opened and closed - // multiple times throughout its lifecycle. + // Close anomaly detection jobs + // A job can be opened and closed multiple times throughout its lifecycle. A + // closed job cannot receive data or perform analysis operations, but you can + // still explore and navigate results. + // When you close a job, it runs housekeeping tasks such as pruning the model + // history, flushing buffers, calculating final results and persisting the model + // snapshots. Depending upon the size of the job, it could take several minutes + // to close and the equivalent time to re-open. After it is closed, the job has + // a minimal overhead on the cluster except for maintaining its meta data. + // Therefore it is a best practice to close jobs that are no longer required to + // process data. + // If you close an anomaly detection job whose datafeed is running, the request + // first tries to stop the datafeed. This behavior is equivalent to calling stop + // datafeed API with the same timeout and force parameters as the close job + // request. + // When a datafeed that has a specified end date stops, it automatically closes + // its associated job. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-close-job.html CloseJob ml_close_job.NewCloseJob - // Deletes a calendar. + // Removes all scheduled events from a calendar, then deletes it. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar.html DeleteCalendar ml_delete_calendar.NewDeleteCalendar // Deletes scheduled events from a calendar. @@ -1283,51 +1625,105 @@ type Ml struct { // Deletes anomaly detection jobs from a calendar. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar-job.html DeleteCalendarJob ml_delete_calendar_job.NewDeleteCalendarJob - // Deletes an existing data frame analytics job. + // Deletes a data frame analytics job. // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-dfanalytics.html DeleteDataFrameAnalytics ml_delete_data_frame_analytics.NewDeleteDataFrameAnalytics // Deletes an existing datafeed. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-datafeed.html DeleteDatafeed ml_delete_datafeed.NewDeleteDatafeed // Deletes expired and unused machine learning data. + // Deletes all job results, model snapshots and forecast data that have exceeded + // their retention days period. Machine learning state documents that are not + // associated with any job are also deleted. + // You can limit the request to a single or set of anomaly detection jobs by + // using a job identifier, a group name, a comma-separated list of jobs, or a + // wildcard expression. You can delete expired data for all anomaly detection + // jobs by using _all, by specifying * as the <job_id>, or by omitting the + // <job_id>. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-expired-data.html DeleteExpiredData ml_delete_expired_data.NewDeleteExpiredData // Deletes a filter. + // If an anomaly detection job references the filter, you cannot delete the + // filter. You must update or delete the job before you can delete the filter.
// https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-filter.html DeleteFilter ml_delete_filter.NewDeleteFilter // Deletes forecasts from a machine learning job. + // By default, forecasts are retained for 14 days. You can specify a + // different retention period with the `expires_in` parameter in the forecast + // jobs API. The delete forecast API enables you to delete one or more + // forecasts before they expire. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-forecast.html DeleteForecast ml_delete_forecast.NewDeleteForecast - // Deletes an existing anomaly detection job. + // Deletes an anomaly detection job. + // + // All job configuration, model state and results are deleted. + // It is not currently possible to delete multiple jobs using wildcards or a + // comma separated list. If you delete a job that has a datafeed, the request + // first tries to delete the datafeed. This behavior is equivalent to calling + // the delete datafeed API with the same timeout and force parameters as the + // delete job request. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html DeleteJob ml_delete_job.NewDeleteJob // Deletes an existing model snapshot. + // You cannot delete the active model snapshot. To delete that snapshot, first + // revert to a different one. To identify the active model snapshot, refer to + // the `model_snapshot_id` in the results from the get jobs API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-snapshot.html DeleteModelSnapshot ml_delete_model_snapshot.NewDeleteModelSnapshot // Deletes an existing trained inference model that is currently not referenced // by an ingest pipeline. // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-trained-models.html DeleteTrainedModel ml_delete_trained_model.NewDeleteTrainedModel - // Deletes a model alias that refers to the trained model + // Deletes a trained model alias. + // This API deletes an existing model alias that refers to a trained model. If + // the model alias is missing or refers to a model other than the one identified + // by the `model_id`, this API returns an error. // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-trained-models-aliases.html DeleteTrainedModelAlias ml_delete_trained_model_alias.NewDeleteTrainedModelAlias - // Estimates the model memory + // Makes an estimation of the memory usage for an anomaly detection job model. + // It is based on analysis configuration details for the job and cardinality + // estimates for the fields it references. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-apis.html EstimateModelMemory ml_estimate_model_memory.NewEstimateModelMemory // Evaluates the data frame analytics for an annotated index. + // The API packages together commonly used evaluation metrics for various types + // of machine learning features. This has been designed for use on indexes + // created by data frame analytics. Evaluation requires both a ground truth + // field and an analytics result field to be present. // https://www.elastic.co/guide/en/elasticsearch/reference/current/evaluate-dfanalytics.html EvaluateDataFrame ml_evaluate_data_frame.NewEvaluateDataFrame // Explains a data frame analytics config. + // This API provides explanations for a data frame analytics config that either + // exists already or one that has not been created yet. 
The following + // explanations are provided: + // * which fields are included or not in the analysis and why, + // * how much memory is estimated to be required. The estimate can be used when + // deciding the appropriate value for the model_memory_limit setting later on. + // If you have object fields or fields that are excluded via source filtering, + // they are not included in the explanation. // http://www.elastic.co/guide/en/elasticsearch/reference/current/explain-dfanalytics.html ExplainDataFrameAnalytics ml_explain_data_frame_analytics.NewExplainDataFrameAnalytics // Forces any buffered data to be processed by the job. + // The flush jobs API is only applicable when sending data for analysis using + // the post data API. Depending on the content of the buffer, it might + // additionally calculate new results. Both flush and close operations are + // similar; however, the flush is more efficient if you are expecting to send + // more data for analysis. When flushing, the job remains open and is available + // to continue analyzing data. A close operation additionally prunes and + // persists the model state to disk and the job must be opened again before + // analyzing further data. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html FlushJob ml_flush_job.NewFlushJob // Predicts the future behavior of a time series by using its historical // behavior. + // + // Forecasts are not supported for jobs that perform population analysis; an + // error occurs if you try to create a forecast for a job that has an + // `over_field_name` in its configuration. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-forecast.html Forecast ml_forecast.NewForecast // Retrieves anomaly detection job results for one or more buckets. + // The API presents a chronological view of the records, grouped by bucket. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-bucket.html GetBuckets ml_get_buckets.NewGetBuckets // Retrieves information about the scheduled events in calendars. @@ -1340,34 +1736,58 @@ type Ml struct { // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-category.html GetCategories ml_get_categories.NewGetCategories // Retrieves configuration information for data frame analytics jobs. + // You can get information for multiple data frame analytics jobs in a single + // API request by using a comma-separated list of data frame analytics jobs or a + // wildcard expression. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-dfanalytics.html GetDataFrameAnalytics ml_get_data_frame_analytics.NewGetDataFrameAnalytics // Retrieves usage information for data frame analytics jobs. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-dfanalytics-stats.html GetDataFrameAnalyticsStats ml_get_data_frame_analytics_stats.NewGetDataFrameAnalyticsStats // Retrieves usage information for datafeeds. + // You can get statistics for multiple datafeeds in a single API request by + // using a comma-separated list of datafeeds or a wildcard expression. You can + // get statistics for all datafeeds by using `_all`, by specifying `*` as the + // `<feed_id>`, or by omitting the `<feed_id>`. If the datafeed is stopped, the + // only information you receive is the `datafeed_id` and the `state`. + // This API returns a maximum of 10,000 datafeeds.
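+ //
+ // Illustrative only: a hedged sketch with the typed client (reusing the `es`
+ // client and `ctx` from the earlier sketch; the zero-argument builder is an
+ // assumption, check the generated package for the exact signature):
+ //
+ //	stats, err := es.Ml.GetDatafeedStats().Do(ctx)
+ //	if err != nil {
+ //		log.Fatalf("get datafeed stats failed: %s", err)
+ //	}
+ //	_ = stats // inspect the returned datafeed statistics as needed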
// https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed-stats.html GetDatafeedStats ml_get_datafeed_stats.NewGetDatafeedStats // Retrieves configuration information for datafeeds. + // You can get information for multiple datafeeds in a single API request by + // using a comma-separated list of datafeeds or a wildcard expression. You can + // get information for all datafeeds by using `_all`, by specifying `*` as the + // `<feed_id>`, or by omitting the `<feed_id>`. + // This API returns a maximum of 10,000 datafeeds. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed.html GetDatafeeds ml_get_datafeeds.NewGetDatafeeds // Retrieves filters. + // You can get a single filter or all filters. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-filter.html GetFilters ml_get_filters.NewGetFilters // Retrieves anomaly detection job results for one or more influencers. + // Influencers are the entities that have contributed to, or are to blame for, + // the anomalies. Influencer results are available only if an + // `influencer_field_name` is specified in the job configuration. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-influencer.html GetInfluencers ml_get_influencers.NewGetInfluencers // Retrieves usage information for anomaly detection jobs. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-stats.html GetJobStats ml_get_job_stats.NewGetJobStats // Retrieves configuration information for anomaly detection jobs. + // You can get information for multiple anomaly detection jobs in a single API + // request by using a group name, a comma-separated list of jobs, or a wildcard + // expression. You can get information for all anomaly detection jobs by using + // `_all`, by specifying `*` as the `<job_id>`, or by omitting the `<job_id>`. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job.html GetJobs ml_get_jobs.NewGetJobs - // Returns information on how ML is using memory. + // Get information about how machine learning jobs and trained models are using + // memory + // on each node, both within the JVM heap and natively, outside of the JVM. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-ml-memory.html GetMemoryStats ml_get_memory_stats.NewGetMemoryStats - // Gets stats for anomaly detection job model snapshot upgrades that are in - // progress. + // Retrieves usage information for anomaly detection job model snapshot + // upgrades. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-model-snapshot-upgrade-stats.html GetModelSnapshotUpgradeStats ml_get_model_snapshot_upgrade_stats.NewGetModelSnapshotUpgradeStats // Retrieves information about model snapshots. @@ -1375,104 +1795,267 @@ type Ml struct { GetModelSnapshots ml_get_model_snapshots.NewGetModelSnapshots // Retrieves overall bucket results that summarize the bucket results of // multiple anomaly detection jobs. + // + // The `overall_score` is calculated by combining the scores of all the + // buckets within the overall bucket span. First, the maximum + // `anomaly_score` per anomaly detection job in the overall bucket is + // calculated. Then the `top_n` of those scores are averaged to result in + // the `overall_score`. This means that you can fine-tune the + // `overall_score` so that it is more or less sensitive to the number of + // jobs that detect an anomaly at the same time.
For example, if you set + // `top_n` to `1`, the `overall_score` is the maximum bucket score in the + // overall bucket. Alternatively, if you set `top_n` to the number of jobs, + // the `overall_score` is high only when all jobs detect anomalies in that + // overall bucket. If you set the `bucket_span` parameter (to a value + // greater than its default), the `overall_score` is the maximum + // `overall_score` of the overall buckets that have a span equal to the + // jobs' largest bucket span. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-overall-buckets.html GetOverallBuckets ml_get_overall_buckets.NewGetOverallBuckets // Retrieves anomaly records for an anomaly detection job. + // Records contain the detailed analytical results. They describe the anomalous + // activity that has been identified in the input data based on the detector + // configuration. + // There can be many anomaly records depending on the characteristics and size + // of the input data. In practice, there are often too many to be able to + // manually process them. The machine learning features therefore perform a + // sophisticated aggregation of the anomaly records into buckets. + // The number of record results depends on the number of anomalies found in each + // bucket, which relates to the number of time series being modeled and the + // number of detectors. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-record.html GetRecords ml_get_records.NewGetRecords - // Retrieves configuration information for a trained inference model. + // Retrieves configuration information for a trained model. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-trained-models.html GetTrainedModels ml_get_trained_models.NewGetTrainedModels - // Retrieves usage information for trained inference models. + // Retrieves usage information for trained models. You can get usage information + // for multiple trained + // models in a single API request by using a comma-separated list of model IDs + // or a wildcard expression. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-trained-models-stats.html GetTrainedModelsStats ml_get_trained_models_stats.NewGetTrainedModelsStats - // Evaluate a trained model. + // Evaluates a trained model. // https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-trained-model.html InferTrainedModel ml_infer_trained_model.NewInferTrainedModel // Returns defaults and limits used by machine learning. + // This endpoint is designed to be used by a user interface that needs to fully + // understand machine learning configurations where some options are not + // specified, meaning that the defaults should be used. This endpoint may be + // used to find out what those defaults are. It also provides information about + // the maximum size of machine learning jobs that could run in the current + // cluster configuration. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-ml-info.html Info ml_info.NewInfo // Opens one or more anomaly detection jobs. + // An anomaly detection job must be opened in order for it to be ready to + // receive and analyze data. It can be opened and closed multiple times + // throughout its lifecycle. + // When you open a new job, it starts with an empty model. + // When you open an existing job, the most recent model state is automatically + // loaded. The job is ready to resume its analysis from where it left off, once + // new data is received. 
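+ //
+ // Illustrative only: a hedged sketch with the typed client (reusing `es` and
+ // `ctx` from the earlier sketch; "my-job" is a hypothetical anomaly detection
+ // job ID and the builder signature may differ):
+ //
+ //	resp, err := es.Ml.OpenJob("my-job").Do(ctx)
+ //	if err != nil {
+ //		log.Fatalf("open job failed: %s", err)
+ //	}
+ //	_ = resp // the response reports whether the job was opened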
// https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-open-job.html OpenJob ml_open_job.NewOpenJob - // Posts scheduled events in a calendar. + // Adds scheduled events to a calendar. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-calendar-event.html PostCalendarEvents ml_post_calendar_events.NewPostCalendarEvents // Sends data to an anomaly detection job for analysis. + // + // IMPORTANT: For each job, data can be accepted from only a single connection + // at a time. + // It is not currently possible to post data to multiple jobs using wildcards or + // a comma-separated list. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html PostData ml_post_data.NewPostData - // Previews that will be analyzed given a data frame analytics config. + // Previews the extracted features used by a data frame analytics config. // http://www.elastic.co/guide/en/elasticsearch/reference/current/preview-dfanalytics.html PreviewDataFrameAnalytics ml_preview_data_frame_analytics.NewPreviewDataFrameAnalytics // Previews a datafeed. + // This API returns the first "page" of search results from a datafeed. + // You can preview an existing datafeed or provide configuration details for a + // datafeed + // and anomaly detection job in the API. The preview shows the structure of the + // data + // that will be passed to the anomaly detection engine. + // IMPORTANT: When Elasticsearch security features are enabled, the preview uses + // the credentials of the user that + // called the API. However, when the datafeed starts it uses the roles of the + // last user that created or updated the + // datafeed. To get a preview that accurately reflects the behavior of the + // datafeed, use the appropriate credentials. + // You can also use secondary authorization headers to supply the credentials. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-preview-datafeed.html PreviewDatafeed ml_preview_datafeed.NewPreviewDatafeed - // Instantiates a calendar. + // Creates a calendar. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-calendar.html PutCalendar ml_put_calendar.NewPutCalendar // Adds an anomaly detection job to a calendar. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-calendar-job.html PutCalendarJob ml_put_calendar_job.NewPutCalendarJob // Instantiates a data frame analytics job. + // This API creates a data frame analytics job that performs an analysis on the + // source indices and stores the outcome in a destination index. // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-dfanalytics.html PutDataFrameAnalytics ml_put_data_frame_analytics.NewPutDataFrameAnalytics // Instantiates a datafeed. + // Datafeeds retrieve data from Elasticsearch for analysis by an anomaly + // detection job. + // You can associate only one datafeed with each anomaly detection job. + // The datafeed contains a query that runs at a defined interval (`frequency`). + // If you are concerned about delayed data, you can add a delay (`query_delay`) + // at each interval. + // When Elasticsearch security features are enabled, your datafeed remembers + // which roles the user who created it had + // at the time of creation and runs the query using those same roles. If you + // provide secondary authorization headers, + // those credentials are used instead. + // You must use Kibana, this API, or the create anomaly detection jobs API to + // create a datafeed.
Do not add a datafeed + // directly to the `.ml-config` index. Do not give users `write` privileges on + // the `.ml-config` index. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-datafeed.html PutDatafeed ml_put_datafeed.NewPutDatafeed // Instantiates a filter. + // A filter contains a list of strings. It can be used by one or more anomaly + // detection jobs. + // Specifically, filters are referenced in the `custom_rules` property of + // detector configuration objects. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-filter.html PutFilter ml_put_filter.NewPutFilter - // Instantiates an anomaly detection job. + // Instantiates an anomaly detection job. If you include a `datafeed_config`, + // you must have read index privileges on the source index. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-job.html PutJob ml_put_job.NewPutJob - // Creates an inference trained model. + // Enables you to supply a trained model that is not created by data frame + // analytics. // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-trained-models.html PutTrainedModel ml_put_trained_model.NewPutTrainedModel - // Creates a new model alias (or reassigns an existing one) to refer to the - // trained model + // Creates or updates a trained model alias. A trained model alias is a logical + // name used to reference a single trained model. + // You can use aliases instead of trained model identifiers to make it easier to + // reference your models. For example, you can use aliases in inference + // aggregations and processors. + // An alias must be unique and refer to only a single trained model. However, + // you can have multiple aliases for each trained model. + // If you use this API to update an alias such that it references a different + // trained model ID and the model uses a different type of data frame analytics, + // an error occurs. For example, this situation occurs if you have a trained + // model for regression analysis and a trained model for classification + // analysis; you cannot reassign an alias from one type of trained model to + // another. + // If you use this API to update an alias and there are very few input fields in + // common between the old and new trained models for the model alias, the API + // returns a warning. // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-trained-models-aliases.html PutTrainedModelAlias ml_put_trained_model_alias.NewPutTrainedModelAlias - // Creates part of a trained model definition + // Creates part of a trained model definition. // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-trained-model-definition-part.html PutTrainedModelDefinitionPart ml_put_trained_model_definition_part.NewPutTrainedModelDefinitionPart - // Creates a trained model vocabulary + // Creates a trained model vocabulary. + // This API is supported only for natural language processing (NLP) models. + // The vocabulary is stored in the index as described in + // `inference_config.*.vocabulary` of the trained model definition. // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-trained-model-vocabulary.html PutTrainedModelVocabulary ml_put_trained_model_vocabulary.NewPutTrainedModelVocabulary - // Resets an existing anomaly detection job. + // Resets an anomaly detection job. + // All model state and results are deleted. The job is ready to start over as if + // it had just been created. 
+ // It is not currently possible to reset multiple jobs using wildcards or a + // comma separated list. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-reset-job.html ResetJob ml_reset_job.NewResetJob // Reverts to a specific snapshot. + // The machine learning features react quickly to anomalous input, learning new + // behaviors in data. Highly anomalous input increases the variance in the + // models whilst the system learns whether this is a new step-change in behavior + // or a one-off event. In the case where this anomalous input is known to be a + // one-off, then it might be appropriate to reset the model state to a time + // before this event. For example, you might consider reverting to a saved + // snapshot after Black Friday or a critical system failure. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-revert-snapshot.html RevertModelSnapshot ml_revert_model_snapshot.NewRevertModelSnapshot // Sets a cluster wide upgrade_mode setting that prepares machine learning // indices for an upgrade. + // When upgrading your cluster, in some circumstances you must restart your + // nodes and reindex your machine learning indices. In those circumstances, + // there must be no machine learning jobs running. You can close the machine + // learning jobs, do the upgrade, then open all the jobs again. Alternatively, + // you can use this API to temporarily halt tasks associated with the jobs and + // datafeeds and prevent new jobs from opening. You can also use this API + // during upgrades that do not require you to reindex your machine learning + // indices, though stopping jobs is not a requirement in that case. + // You can see the current value for the upgrade_mode setting by using the get + // machine learning info API. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-set-upgrade-mode.html SetUpgradeMode ml_set_upgrade_mode.NewSetUpgradeMode // Starts a data frame analytics job. + // A data frame analytics job can be started and stopped multiple times + // throughout its lifecycle. + // If the destination index does not exist, it is created automatically the + // first time you start the data frame analytics job. The + // `index.number_of_shards` and `index.number_of_replicas` settings for the + // destination index are copied from the source index. If there are multiple + // source indices, the destination index copies the highest setting values. The + // mappings for the destination index are also copied from the source indices. + // If there are any mapping conflicts, the job fails to start. + // If the destination index exists, it is used as is. You can therefore set up + // the destination index in advance with custom settings and mappings. // https://www.elastic.co/guide/en/elasticsearch/reference/current/start-dfanalytics.html StartDataFrameAnalytics ml_start_data_frame_analytics.NewStartDataFrameAnalytics // Starts one or more datafeeds. + // + // A datafeed must be started in order to retrieve data from Elasticsearch. A + // datafeed can be started and stopped + // multiple times throughout its lifecycle. + // + // Before you can start a datafeed, the anomaly detection job must be open. + // Otherwise, an error occurs. + // + // If you restart a stopped datafeed, it continues processing input data from + // the next millisecond after it was stopped. + // If new data was indexed for that exact millisecond between stopping and + // starting, it will be ignored. 
+ // + // When Elasticsearch security features are enabled, your datafeed remembers + // which roles the last user to create or + // update it had at the time of creation or update and runs the query using + // those same roles. If you provided secondary + // authorization headers when you created or updated the datafeed, those + // credentials are used instead. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-start-datafeed.html StartDatafeed ml_start_datafeed.NewStartDatafeed - // Start a trained model deployment. + // Starts a trained model deployment, which allocates the model to every machine + // learning node. // https://www.elastic.co/guide/en/elasticsearch/reference/current/start-trained-model-deployment.html StartTrainedModelDeployment ml_start_trained_model_deployment.NewStartTrainedModelDeployment // Stops one or more data frame analytics jobs. + // A data frame analytics job can be started and stopped multiple times + // throughout its lifecycle. // https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-dfanalytics.html StopDataFrameAnalytics ml_stop_data_frame_analytics.NewStopDataFrameAnalytics // Stops one or more datafeeds. + // A datafeed that is stopped ceases to retrieve data from Elasticsearch. A + // datafeed can be started and stopped + // multiple times throughout its lifecycle. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-stop-datafeed.html StopDatafeed ml_stop_datafeed.NewStopDatafeed - // Stop a trained model deployment. + // Stops a trained model deployment. // https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-trained-model-deployment.html StopTrainedModelDeployment ml_stop_trained_model_deployment.NewStopTrainedModelDeployment - // Updates certain properties of a data frame analytics job. + // Updates an existing data frame analytics job. // https://www.elastic.co/guide/en/elasticsearch/reference/current/update-dfanalytics.html UpdateDataFrameAnalytics ml_update_data_frame_analytics.NewUpdateDataFrameAnalytics - // Updates certain properties of a datafeed. + // Updates the properties of a datafeed. + // You must stop and start the datafeed for the changes to be applied. + // When Elasticsearch security features are enabled, your datafeed remembers + // which roles the user who updated it had at + // the time of the update and runs the query using those same roles. If you + // provide secondary authorization headers, + // those credentials are used instead. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-datafeed.html UpdateDatafeed ml_update_datafeed.NewUpdateDatafeed - // Updates the description of a filter, adds items, or removes items. + // Updates the description of a filter, adds items, or removes items from the + // list. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-filter.html UpdateFilter ml_update_filter.NewUpdateFilter // Updates certain properties of an anomaly detection job. @@ -1481,7 +2064,19 @@ type Ml struct { // Updates certain properties of a snapshot. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-snapshot.html UpdateModelSnapshot ml_update_model_snapshot.NewUpdateModelSnapshot - // Upgrades a given job snapshot to the current major version. + // Updates certain properties of a trained model deployment.
+ // https://www.elastic.co/guide/en/elasticsearch/reference/current/update-trained-model-deployment.html + UpdateTrainedModelDeployment ml_update_trained_model_deployment.NewUpdateTrainedModelDeployment + // Upgrades an anomaly detection model snapshot to the latest major version. + // Over time, older snapshot formats are deprecated and removed. Anomaly + // detection jobs support only snapshots that are from the current or previous + // major version. + // This API provides a means to upgrade a snapshot to the current major version. + // This aids in preparing the cluster for an upgrade to the next major version. + // Only one snapshot per anomaly detection job can be upgraded at a time and the + // upgraded snapshot cannot be the current snapshot of the anomaly detection + // job. // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-upgrade-job-model-snapshot.html UpgradeJobSnapshot ml_upgrade_job_snapshot.NewUpgradeJobSnapshot // Validates an anomaly detection job. @@ -1499,43 +2094,76 @@ type Monitoring struct { } type Nodes struct { - // Removes the archived repositories metering information present in the - // cluster. + // You can use this API to clear the archived repositories metering information + // in the cluster. // https://www.elastic.co/guide/en/elasticsearch/reference/current/clear-repositories-metering-archive-api.html ClearRepositoriesMeteringArchive nodes_clear_repositories_metering_archive.NewClearRepositoriesMeteringArchive - // Returns cluster repositories metering information. + // You can use the cluster repositories metering API to retrieve repositories + // metering information in a cluster. + // This API exposes monotonically non-decreasing counters and it’s expected that + // clients would durably store the + // information needed to compute aggregations over a period of time. + // Additionally, the information exposed by this + // API is volatile, meaning that it won’t be present after node restarts. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-repositories-metering-api.html GetRepositoriesMeteringInfo nodes_get_repositories_metering_info.NewGetRepositoriesMeteringInfo - // Returns information about hot threads on each node in the cluster. + // This API yields a breakdown of the hot threads on each selected node in the + // cluster. + // The output is plain text with a breakdown of each node’s top hot threads. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-nodes-hot-threads.html HotThreads nodes_hot_threads.NewHotThreads - // Returns information about nodes in the cluster. + // Returns cluster nodes information. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-nodes-info.html Info nodes_info.NewInfo - // Reloads secure settings. + // Reloads the keystore on nodes in the cluster. // https://www.elastic.co/guide/en/elasticsearch/reference/current/secure-settings.html#reloadable-secure-settings ReloadSecureSettings nodes_reload_secure_settings.NewReloadSecureSettings - // Returns statistical information about nodes in the cluster. + // Returns cluster nodes statistics. // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-nodes-stats.html Stats nodes_stats.NewStats - // Returns low-level information about REST actions usage on nodes. + // Returns information on the usage of features. 
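+ //
+ // Illustrative only: a hedged sketch with the typed client (reusing `es` and
+ // `ctx` from the earlier sketch; the zero-argument builder is an assumption):
+ //
+ //	usage, err := es.Nodes.Usage().Do(ctx)
+ //	if err != nil {
+ //		log.Fatalf("nodes usage failed: %s", err)
+ //	}
+ //	_ = usage // per-node REST action usage information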
// https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-nodes-usage.html Usage nodes_usage.NewUsage } -type QueryRuleset struct { +type Profiling struct { + // Extracts a UI-optimized structure to render flamegraphs from Universal + // Profiling. + // https://www.elastic.co/guide/en/observability/current/universal-profiling.html + Flamegraph profiling_flamegraph.NewFlamegraph + // Extracts raw stacktrace information from Universal Profiling. + // https://www.elastic.co/guide/en/observability/current/universal-profiling.html + Stacktraces profiling_stacktraces.NewStacktraces + // Returns basic information about the status of Universal Profiling. + // https://www.elastic.co/guide/en/observability/current/universal-profiling.html + Status profiling_status.NewStatus + // Extracts a list of topN functions from Universal Profiling. + // https://www.elastic.co/guide/en/observability/current/universal-profiling.html + TopnFunctions profiling_topn_functions.NewTopnFunctions +} + +type QueryRules struct { + // Deletes a query rule within a query ruleset. + // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-query-rule.html + DeleteRule query_rules_delete_rule.NewDeleteRule // Deletes a query ruleset. // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-query-ruleset.html - Delete query_ruleset_delete.NewDelete - // Returns the details about a query ruleset. + DeleteRuleset query_rules_delete_ruleset.NewDeleteRuleset + // Returns the details about a query rule within a query ruleset + // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-query-rule.html + GetRule query_rules_get_rule.NewGetRule + // Returns the details about a query ruleset // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-query-ruleset.html - Get query_ruleset_get.NewGet - // Lists query rulesets. + GetRuleset query_rules_get_ruleset.NewGetRuleset + // Returns summarized information about existing query rulesets. // https://www.elastic.co/guide/en/elasticsearch/reference/current/list-query-rulesets.html - List query_ruleset_list.NewList + ListRulesets query_rules_list_rulesets.NewListRulesets + // Creates or updates a query rule within a query ruleset. + // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-query-rule.html + PutRule query_rules_put_rule.NewPutRule // Creates or updates a query ruleset. // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-query-ruleset.html - Put query_ruleset_put.NewPut + PutRuleset query_rules_put_ruleset.NewPutRuleset } type Rollup struct { @@ -1549,14 +2177,14 @@ type Rollup struct { // specific index or index pattern. // https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-get-rollup-caps.html GetRollupCaps rollup_get_rollup_caps.NewGetRollupCaps - // Returns the rollup capabilities of all jobs inside of a rollup index (e.g. - // the index where rollup data is stored). + // Returns the rollup capabilities of all jobs inside of a rollup index (for + // example, the index where rollup data is stored). // https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-get-rollup-index-caps.html GetRollupIndexCaps rollup_get_rollup_index_caps.NewGetRollupIndexCaps // Creates a rollup job. // https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html PutJob rollup_put_job.NewPutJob - // Enables searching rolled-up data using the standard query DSL. + // Enables searching rolled-up data using the standard Query DSL. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-search.html RollupSearch rollup_rollup_search.NewRollupSearch // Starts an existing, stopped rollup job. @@ -1574,7 +2202,7 @@ type SearchApplication struct { // Delete a behavioral analytics collection. // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-analytics-collection.html DeleteBehavioralAnalytics search_application_delete_behavioral_analytics.NewDeleteBehavioralAnalytics - // Returns the details about a search application. + // Returns the details about a search application // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-search-application.html Get search_application_get.NewGet // Returns the existing behavioral analytics collections. @@ -1589,7 +2217,7 @@ type SearchApplication struct { // Creates a behavioral analytics collection. // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-analytics-collection.html PutBehavioralAnalytics search_application_put_behavioral_analytics.NewPutBehavioralAnalytics - // Perform a search against a search application + // Perform a search against a search application. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-application-search.html Search search_application_search.NewSearch } @@ -1610,11 +2238,16 @@ type SearchableSnapshots struct { } type Security struct { - // Creates or updates the user profile on behalf of another user. + // Creates or updates a user profile on behalf of another user. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-activate-user-profile.html ActivateUserProfile security_activate_user_profile.NewActivateUserProfile - // Enables authentication as a user and retrieve information about the - // authenticated user. + // Enables you to submit a request with a basic auth header to authenticate a + // user and retrieve information about the authenticated user. + // A successful call returns a JSON structure that shows user information such + // as their username, the roles that are assigned to the user, any assigned + // metadata, and information about the realms that authenticated and authorized + // the user. + // If the user cannot be authenticated, this API returns a 401 status code. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-authenticate.html Authenticate security_authenticate.NewAuthenticate // Updates the attributes of multiple existing API keys. @@ -1623,7 +2256,9 @@ type Security struct { // Changes the passwords of users in the native realm and built-in users. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-change-password.html ChangePassword security_change_password.NewChangePassword - // Clear a subset or all entries from the API key cache. + // Evicts a subset of all entries from the API key cache. + // The cache is also automatically cleared on state changes of the security + // index. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-api-key-cache.html ClearApiKeyCache security_clear_api_key_cache.NewClearApiKeyCache // Evicts application privileges from the native application privileges cache. @@ -1640,12 +2275,18 @@ type Security struct { // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-service-token-caches.html ClearCachedServiceTokens security_clear_cached_service_tokens.NewClearCachedServiceTokens // Creates an API key for access without requiring basic authentication. 
+ // A successful request returns a JSON structure that contains the API key, its + // unique id, and its name. + // If applicable, it also returns expiration information for the API key in + // milliseconds. + // NOTE: By default, API keys never expire. You can specify expiration + // information when you create the API keys. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html CreateApiKey security_create_api_key.NewCreateApiKey // Creates a cross-cluster API key for API key based remote cluster access. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-cross-cluster-api-key.html CreateCrossClusterApiKey security_create_cross_cluster_api_key.NewCreateCrossClusterApiKey - // Creates a service account token for access without requiring basic + // Creates a service accounts token for access without requiring basic // authentication. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-service-token.html CreateServiceToken security_create_service_token.NewCreateServiceToken @@ -1676,14 +2317,19 @@ type Security struct { // Enables a user profile so it's visible in user profile searches. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-enable-user-profile.html EnableUserProfile security_enable_user_profile.NewEnableUserProfile - // Allows a kibana instance to configure itself to communicate with a secured - // elasticsearch cluster. + // Enables a Kibana instance to configure itself for communication with a + // secured Elasticsearch cluster. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-kibana-enrollment.html EnrollKibana security_enroll_kibana.NewEnrollKibana - // Allows a new node to enroll to an existing cluster with security enabled. + // Allows a new node to join an existing cluster with security features enabled. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-node-enrollment.html EnrollNode security_enroll_node.NewEnrollNode // Retrieves information for one or more API keys. + // NOTE: If you have only the `manage_own_api_key` privilege, this API returns + // only the API keys that you own. + // If you have `read_security`, `manage_api_key` or greater privileges + // (including `manage_security`), this API returns all API keys regardless of + // ownership. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-api-key.html GetApiKey security_get_api_key.NewGetApiKey // Retrieves the list of cluster privileges and index privileges that are @@ -1693,13 +2339,16 @@ type Security struct { // Retrieves application privileges. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-privileges.html GetPrivileges security_get_privileges.NewGetPrivileges - // Retrieves roles in the native realm. + // The role management APIs are generally the preferred way to manage roles, + // rather than using file-based role management. + // The get roles API cannot retrieve roles that are defined in roles files. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html GetRole security_get_role.NewGetRole // Retrieves role mappings. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role-mapping.html GetRoleMapping security_get_role_mapping.NewGetRoleMapping - // Retrieves information about service accounts. 
+ // This API returns a list of service accounts that match the provided path + // parameter(s). // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-service-accounts.html GetServiceAccounts security_get_service_accounts.NewGetServiceAccounts // Retrieves information of all service credentials for a service account. @@ -1717,10 +2366,32 @@ type Security struct { // Retrieves security privileges for the logged in user. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user-privileges.html GetUserPrivileges security_get_user_privileges.NewGetUserPrivileges - // Retrieves user profiles for the given unique ID(s). + // Retrieves a user's profile using the unique profile ID. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user-profile.html GetUserProfile security_get_user_profile.NewGetUserProfile // Creates an API key on behalf of another user. + // This API is similar to Create API keys, however it creates the API key for a + // user that is different than the user that runs the API. + // The caller must have authentication credentials (either an access token, or a + // username and password) for the user on whose behalf the API key will be + // created. + // It is not possible to use this API to create an API key without that user’s + // credentials. + // The user, for whom the authentication credentials is provided, can optionally + // "run as" (impersonate) another user. + // In this case, the API key will be created on behalf of the impersonated user. + // + // This API is intended be used by applications that need to create and manage + // API keys for end users, but cannot guarantee that those users have permission + // to create API keys on their own behalf. + // + // A successful grant API key API call returns a JSON structure that contains + // the API key, its unique id, and its name. + // If applicable, it also returns expiration information for the API key in + // milliseconds. + // + // By default, API keys never expire. You can specify expiration information + // when you create the API keys. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-grant-api-key.html GrantApiKey security_grant_api_key.NewGrantApiKey // Determines whether the specified user has a specified list of privileges. @@ -1731,6 +2402,15 @@ type Security struct { // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-has-privileges-user-profile.html HasPrivilegesUserProfile security_has_privileges_user_profile.NewHasPrivilegesUserProfile // Invalidates one or more API keys. + // The `manage_api_key` privilege allows deleting any API keys. + // The `manage_own_api_key` only allows deleting API keys that are owned by the + // user. + // In addition, with the `manage_own_api_key` privilege, an invalidation request + // must be issued in one of the three formats: + // - Set the parameter `owner=true`. + // - Or, set both `username` and `realm_name` to match the user’s identity. + // - Or, if the request is issued by an API key, i.e. an API key invalidates + // itself, specify its ID in the `ids` field. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-invalidate-api-key.html InvalidateApiKey security_invalidate_api_key.NewInvalidateApiKey // Invalidates one or more access tokens or refresh tokens. @@ -1750,7 +2430,10 @@ type Security struct { // Adds or updates application privileges. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-privileges.html PutPrivileges security_put_privileges.NewPutPrivileges - // Adds and updates roles in the native realm. + // The role management APIs are generally the preferred way to manage roles, + // rather than using file-based role management. + // The create or update roles API cannot update roles that are defined in roles + // files. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role.html PutRole security_put_role.NewPutRole // Creates and updates role mappings. @@ -1760,39 +2443,66 @@ type Security struct { // to as native users. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-user.html PutUser security_put_user.NewPutUser - // Retrieves information for API keys using a subset of query DSL + // Retrieves information for API keys in a paginated manner. You can optionally + // filter the results with a query. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-api-key.html QueryApiKeys security_query_api_keys.NewQueryApiKeys - // Exchanges a SAML Response message for an Elasticsearch access token and - // refresh token pair + // Retrieves information for users using a subset of query DSL. + // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-user.html + QueryUser security_query_user.NewQueryUser + // Submits a SAML Response message to Elasticsearch for consumption. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-authenticate.html SamlAuthenticate security_saml_authenticate.NewSamlAuthenticate - // Verifies the logout response sent from the SAML IdP + // Verifies the logout response sent from the SAML IdP. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-complete-logout.html SamlCompleteLogout security_saml_complete_logout.NewSamlCompleteLogout - // Consumes a SAML LogoutRequest + // Submits a SAML LogoutRequest message to Elasticsearch for consumption. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-invalidate.html SamlInvalidate security_saml_invalidate.NewSamlInvalidate - // Invalidates an access token and a refresh token that were generated via the - // SAML Authenticate API + // Submits a request to invalidate an access token and refresh token. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-logout.html SamlLogout security_saml_logout.NewSamlLogout - // Creates a SAML authentication request + // Creates a SAML authentication request (`<AuthnRequest>`) as a URL string, based + // on the configuration of the respective SAML realm in Elasticsearch. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-prepare-authentication.html SamlPrepareAuthentication security_saml_prepare_authentication.NewSamlPrepareAuthentication - // Generates SAML metadata for the Elastic stack SAML 2.0 Service Provider + // Generate SAML metadata for a SAML 2.0 Service Provider. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-sp-metadata.html SamlServiceProviderMetadata security_saml_service_provider_metadata.NewSamlServiceProviderMetadata // Get suggestions for user profiles that match specified search criteria.
// https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-suggest-user-profile.html SuggestUserProfiles security_suggest_user_profiles.NewSuggestUserProfiles // Updates attributes of an existing API key. + // Users can only update API keys that they created or that were granted to + // them. + // Use this API to update API keys created by the create API Key or grant API + // Key APIs. + // If you need to apply the same update to many API keys, you can use bulk + // update API Keys to reduce overhead. + // It’s not possible to update expired API keys, or API keys that have been + // invalidated by invalidate API Key. + // This API supports updates to an API key’s access scope and metadata. + // The access scope of an API key is derived from the `role_descriptors` you + // specify in the request, and a snapshot of the owner user’s permissions at the + // time of the request. + // The snapshot of the owner’s permissions is updated automatically on every + // call. + // If you don’t specify `role_descriptors` in the request, a call to this API + // might still change the API key’s access scope. + // This change can occur if the owner user’s permissions have changed since the + // API key was created or last modified. + // To update another user’s API key, use the `run_as` feature to submit a + // request on behalf of another user. + // IMPORTANT: It’s not possible to use an API key as the authentication + // credential for this API. + // To update an API key, the owner user’s credentials are required. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-update-api-key.html UpdateApiKey security_update_api_key.NewUpdateApiKey // Update settings for the security system index // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-update-settings.html UpdateSettings security_update_settings.NewUpdateSettings - // Update application specific data for the user profile of the given unique ID. + // Updates specific data for the user profile that's associated with the + // specified unique ID. // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-update-user-profile-data.html UpdateUserProfileData security_update_user_profile_data.NewUpdateUserProfileData } @@ -1848,7 +2558,8 @@ type Slm struct { } type Snapshot struct { - // Removes stale data from repository. + // Triggers the review of a snapshot repository’s contents and deletes any stale + // data not referenced by existing snapshots. // https://www.elastic.co/guide/en/elasticsearch/reference/current/clean-up-snapshot-repo-api.html CleanupRepository snapshot_cleanup_repository.NewCleanupRepository // Clones indices from one snapshot into another snapshot in the same @@ -1931,7 +2642,7 @@ type Synonyms struct { // Retrieves a summary of all defined synonym sets // https://www.elastic.co/guide/en/elasticsearch/reference/current/list-synonyms-sets.html GetSynonymsSets synonyms_get_synonyms_sets.NewGetSynonymsSets - // Creates or updates a synonyms set + // Creates or updates a synonym set. // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-synonyms-set.html PutSynonym synonyms_put_synonym.NewPutSynonym // Creates or updates a synonym rule in a synonym set @@ -1946,12 +2657,20 @@ type Tasks struct { // Returns information about a task. // https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html Get tasks_get.NewGet - // Returns a list of tasks. 
+ // The task management API returns information about tasks currently executing + // on one or more nodes in the cluster. // https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html List tasks_list.NewList } type TextStructure struct { + // Finds the structure of a text field in an index. + // https://www.elastic.co/guide/en/elasticsearch/reference/current/find-field-structure.html + FindFieldStructure text_structure_find_field_structure.NewFindFieldStructure + // Finds the structure of a list of messages. The messages must contain data + // that is suitable to be ingested into Elasticsearch. + // https://www.elastic.co/guide/en/elasticsearch/reference/current/find-message-structure.html + FindMessageStructure text_structure_find_message_structure.NewFindMessageStructure // Finds the structure of a text file. The text file must contain data that is // suitable to be ingested into Elasticsearch. // https://www.elastic.co/guide/en/elasticsearch/reference/current/find-structure.html @@ -1962,9 +2681,12 @@ type TextStructure struct { } type Transform struct { - // Deletes an existing transform. + // Deletes a transform. // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-transform.html DeleteTransform transform_delete_transform.NewDeleteTransform + // Retrieves transform usage information for transform nodes. + // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-node-stats.html + GetNodeStats transform_get_node_stats.NewGetNodeStats // Retrieves configuration information for transforms. // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform.html GetTransform transform_get_transform.NewGetTransform @@ -1972,27 +2694,122 @@ type Transform struct { // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-stats.html GetTransformStats transform_get_transform_stats.NewGetTransformStats // Previews a transform. + // + // It returns a maximum of 100 results. The calculations are based on all the + // current data in the source index. It also + // generates a list of mappings and settings for the destination index. These + // values are determined based on the field + // types of the source index and the transform aggregations. // https://www.elastic.co/guide/en/elasticsearch/reference/current/preview-transform.html PreviewTransform transform_preview_transform.NewPreviewTransform - // Instantiates a transform. + // Creates a transform. + // + // A transform copies data from source indices, transforms it, and persists it + // into an entity-centric destination index. You can also think of the + // destination index as a two-dimensional tabular data structure (known as + // a data frame). The ID for each document in the data frame is generated from a + // hash of the entity, so there is a + // unique row per entity. + // + // You must choose either the latest or pivot method for your transform; you + // cannot use both in a single transform. If + // you choose to use the pivot method for your transform, the entities are + // defined by the set of `group_by` fields in + // the pivot object. If you choose to use the latest method, the entities are + // defined by the `unique_key` field values + // in the latest object. + // + // You must have `create_index`, `index`, and `read` privileges on the + // destination index and `read` and + // `view_index_metadata` privileges on the source indices. 
When Elasticsearch + // security features are enabled, the + // transform remembers which roles the user that created it had at the time of + // creation and uses those same roles. If + // those roles do not have the required privileges on the source and destination + // indices, the transform fails when it + // attempts unauthorized operations. + // + // NOTE: You must use Kibana or this API to create a transform. Do not add a + // transform directly into any + // `.transform-internal*` indices using the Elasticsearch index API. If + // Elasticsearch security features are enabled, do + // not give users any privileges on `.transform-internal*` indices. If you used + // transforms prior to 7.5, also do not + // give users any privileges on `.data-frame-internal*` indices. // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html PutTransform transform_put_transform.NewPutTransform - // Resets an existing transform. + // Resets a transform. + // Before you can reset it, you must stop it; alternatively, use the `force` + // query parameter. + // If the destination index was created by the transform, it is deleted. // https://www.elastic.co/guide/en/elasticsearch/reference/current/reset-transform.html ResetTransform transform_reset_transform.NewResetTransform // Schedules now a transform. + // + // If you _schedule_now a transform, it will process the new data instantly, + // without waiting for the configured frequency interval. After _schedule_now + // API is called, + // the transform will be processed again at now + frequency unless _schedule_now + // API + // is called again in the meantime. // https://www.elastic.co/guide/en/elasticsearch/reference/current/schedule-now-transform.html ScheduleNowTransform transform_schedule_now_transform.NewScheduleNowTransform - // Starts one or more transforms. + // Starts a transform. + // + // When you start a transform, it creates the destination index if it does not + // already exist. The `number_of_shards` is + // set to `1` and the `auto_expand_replicas` is set to `0-1`. If it is a pivot + // transform, it deduces the mapping + // definitions for the destination index from the source indices and the + // transform aggregations. If fields in the + // destination index are derived from scripts (as in the case of + // `scripted_metric` or `bucket_script` aggregations), + // the transform uses dynamic mappings unless an index template exists. If it is + // a latest transform, it does not deduce + // mapping definitions; it uses dynamic mappings. To use explicit mappings, + // create the destination index before you + // start the transform. Alternatively, you can create an index template, though + // it does not affect the deduced mappings + // in a pivot transform. + // + // When the transform starts, a series of validations occur to ensure its + // success. If you deferred validation when you + // created the transform, they occur when you start the transform—​with the + // exception of privilege checks. When + // Elasticsearch security features are enabled, the transform remembers which + // roles the user that created it had at the + // time of creation and uses those same roles. If those roles do not have the + // required privileges on the source and + // destination indices, the transform fails when it attempts unauthorized + // operations. 
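+ //
+ // Illustrative only: a hedged sketch with the typed client (reusing `es` and
+ // `ctx` from the earlier sketch; "my-transform" is a hypothetical transform ID
+ // and the builder signature may differ):
+ //
+ //	resp, err := es.Transform.StartTransform("my-transform").Do(ctx)
+ //	if err != nil {
+ //		log.Fatalf("start transform failed: %s", err)
+ //	}
+ //	_ = resp // acknowledged on success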
// https://www.elastic.co/guide/en/elasticsearch/reference/current/start-transform.html StartTransform transform_start_transform.NewStartTransform // Stops one or more transforms. // https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-transform.html StopTransform transform_stop_transform.NewStopTransform // Updates certain properties of a transform. + // + // All updated properties except `description` do not take effect until after + // the transform starts the next checkpoint, + // thus there is data consistency in each checkpoint. To use this API, you must + // have `read` and `view_index_metadata` + // privileges for the source indices. You must also have `index` and `read` + // privileges for the destination index. When + // Elasticsearch security features are enabled, the transform remembers which + // roles the user who updated it had at the + // time of update and runs with those privileges. // https://www.elastic.co/guide/en/elasticsearch/reference/current/update-transform.html UpdateTransform transform_update_transform.NewUpdateTransform // Upgrades all transforms. + // This API identifies transforms that have a legacy configuration format and + // upgrades them to the latest version. It + // also cleans up the internal data structures that store the transform state + // and checkpoints. The upgrade does not + // affect the source and destination indices. The upgrade also does not affect + // the roles that transforms use when + // Elasticsearch security features are enabled; the role used to read source + // data and write to the destination index + // remains unchanged. // https://www.elastic.co/guide/en/elasticsearch/reference/current/upgrade-transforms.html UpgradeTransforms transform_upgrade_transforms.NewUpgradeTransforms } @@ -2011,7 +2828,13 @@ type Watcher struct { // Removes a watch from Watcher. // https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-delete-watch.html DeleteWatch watcher_delete_watch.NewDeleteWatch - // Forces the execution of a stored watch. + // This API can be used to force execution of the watch outside of its + // triggering logic or to simulate the watch execution for debugging purposes. + // For testing and debugging purposes, you also have fine-grained control on how + // the watch runs. You can execute the watch without executing all of its + // actions or alternatively by simulating them. You can also force execution by + // ignoring the watch condition and control whether a watch record would be + // written to the watch history after execution. // https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-execute-watch.html ExecuteWatch watcher_execute_watch.NewExecuteWatch // Retrieve settings for the watcher system index @@ -2041,10 +2864,11 @@ type Watcher struct { } type Xpack struct { - // Retrieves information about the installed X-Pack features. + // Provides general information about the installed X-Pack features. // https://www.elastic.co/guide/en/elasticsearch/reference/current/info-api.html Info xpack_info.NewInfo - // Retrieves usage information about the installed X-Pack features. + // This API provides information about which features are currently enabled and + // available under the current license and some usage statistics. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/usage-api.html Usage xpack_usage.NewUsage } @@ -2052,6 +2876,7 @@ type Xpack struct { type API struct { AsyncSearch AsyncSearch Autoscaling Autoscaling + Capabilities Capabilities Cat Cat Ccr Ccr Cluster Cluster @@ -2073,7 +2898,8 @@ type API struct { Ml Ml Monitoring Monitoring Nodes Nodes - QueryRuleset QueryRuleset + Profiling Profiling + QueryRules QueryRules Rollup Rollup SearchApplication SearchApplication SearchableSnapshots SearchableSnapshots @@ -2090,55 +2916,60 @@ type API struct { Watcher Watcher Xpack Xpack - // Allows to perform multiple index/update/delete operations in a single - // request. + // Performs multiple indexing or delete operations in a single API call. + // This reduces overhead and can greatly increase indexing speed. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html Bulk core_bulk.NewBulk - // Explicitly clears the search context for a scroll. + // Clears the search context and results for a scrolling search. // https://www.elastic.co/guide/en/elasticsearch/reference/current/clear-scroll-api.html ClearScroll core_clear_scroll.NewClearScroll - // Close a point in time + // Closes a point-in-time. // https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html ClosePointInTime core_close_point_in_time.NewClosePointInTime // Returns number of documents matching a query. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-count.html Count core_count.NewCount - // Creates a new document in the index. - // - // Returns a 409 response when a document with a same ID already exists in the - // index. + // Adds a JSON document to the specified data stream or index and makes it + // searchable. + // If the target is an index and the document already exists, the request + // updates the document and increments its version. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html Create core_create.NewCreate - // Removes a document from the index. + // Removes a JSON document from the specified index. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete.html Delete core_delete.NewDelete - // Deletes documents matching the provided query. + // Deletes documents that match the specified query. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html DeleteByQuery core_delete_by_query.NewDeleteByQuery // Changes the number of requests per second for a particular Delete By Query // operation. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html DeleteByQueryRethrottle core_delete_by_query_rethrottle.NewDeleteByQueryRethrottle - // Deletes a script. + // Deletes a stored script or search template. // https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html DeleteScript core_delete_script.NewDeleteScript - // Returns information about whether a document exists in an index. + // Checks if a document in an index exists. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-get.html Exists core_exists.NewExists - // Returns information about whether a document source exists in an index. + // Checks if a document's `_source` is stored. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-get.html ExistsSource core_exists_source.NewExistsSource - // Returns information about why a specific matches (or doesn't match) a query. 
+ // Returns information about why a specific document matches (or doesn’t match) + // a query. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-explain.html Explain core_explain.NewExplain - // Returns the information about the capabilities of fields among multiple - // indices. + // The field capabilities API returns the information about the capabilities of + // fields among multiple indices. + // The field capabilities API returns runtime fields like any other field. For + // example, a runtime field with a type + // of keyword is returned as any other field that belongs to the `keyword` + // family. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-field-caps.html FieldCaps core_field_caps.NewFieldCaps // Returns a document. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-get.html Get core_get.NewGet - // Returns a script. + // Retrieves a stored script or search template. // https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html GetScript core_get_script.NewGetScript // Returns all script contexts. @@ -2153,7 +2984,10 @@ type API struct { // Returns the health of the cluster. // https://www.elastic.co/guide/en/elasticsearch/reference/current/health-api.html HealthReport core_health_report.NewHealthReport - // Creates or updates a document in an index. + // Adds a JSON document to the specified data stream or index and makes it + // searchable. + // If the target is an index and the document already exists, the request + // updates the document and increments its version. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html Index core_index.NewIndex // Returns basic information about the cluster. @@ -2168,23 +3002,33 @@ type API struct { // Allows to execute several search operations in one request. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html Msearch core_msearch.NewMsearch - // Allows to execute several search template operations in one request. + // Runs multiple templated searches with a single request. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html MsearchTemplate core_msearch_template.NewMsearchTemplate // Returns multiple termvectors in one request. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-multi-termvectors.html Mtermvectors core_mtermvectors.NewMtermvectors - // Open a point in time that can be used in subsequent searches + // A search request by default executes against the most recent visible data of + // the target indices, + // which is called point in time. Elasticsearch pit (point in time) is a + // lightweight view into the + // state of the data as it existed when initiated. In some cases, it’s preferred + // to perform multiple + // search requests using the same point in time. For example, if refreshes + // happen between + // `search_after` requests, then the results of those requests might not be + // consistent as changes happening + // between searches are only visible to the more recent point in time. // https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html OpenPointInTime core_open_point_in_time.NewOpenPointInTime // Returns whether the cluster is running. // https://www.elastic.co/guide/en/elasticsearch/reference/current/index.html Ping core_ping.NewPing - // Creates or updates a script. + // Creates or updates a stored script or search template. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html PutScript core_put_script.NewPutScript - // Allows to evaluate the quality of ranked search results over a set of typical - // search queries + // Enables you to evaluate the quality of ranked search results over a set of + // typical search queries. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-rank-eval.html RankEval core_rank_eval.NewRankEval // Allows to copy documents from one index to another, optionally filtering the @@ -2194,19 +3038,22 @@ type API struct { // documents from a remote cluster. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html Reindex core_reindex.NewReindex - // Changes the number of requests per second for a particular Reindex operation. + // Copies documents from a source to a destination. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html ReindexRethrottle core_reindex_rethrottle.NewReindexRethrottle - // Allows to use the Mustache language to pre-render a search definition. + // Renders a search template as a search request body. // https://www.elastic.co/guide/en/elasticsearch/reference/current/render-search-template-api.html RenderSearchTemplate core_render_search_template.NewRenderSearchTemplate - // Allows an arbitrary script to be executed and a result to be returned + // Runs a script and returns a result. // https://www.elastic.co/guide/en/elasticsearch/painless/current/painless-execute-api.html ScriptsPainlessExecute core_scripts_painless_execute.NewScriptsPainlessExecute // Allows to retrieve a large numbers of results from a single search request. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-body.html#request-body-search-scroll Scroll core_scroll.NewScroll - // Returns results matching a query. + // Returns search hits that match the query defined in the request. + // You can provide search queries using the `q` query string parameter or the + // request body. + // If both are specified, only the query parameter is used. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-search.html Search core_search.NewSearch // Searches a vector tile for geospatial values. Returns results as a binary @@ -2217,7 +3064,7 @@ type API struct { // be executed against. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-shards.html SearchShards core_search_shards.NewSearchShards - // Allows to use the Mustache language to pre-render a search definition. + // Runs a search with a search template. // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html SearchTemplate core_search_template.NewSearchTemplate // The terms enum API can be used to discover terms in the index that begin @@ -2232,10 +3079,10 @@ type API struct { // Updates a document with a script or partial document. // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update.html Update core_update.NewUpdate - // Updates documents that match the specified query. If no query is specified, - // performs an update on every document in the index without changing the - // source, - // for example to pick up a mapping change. + // Updates documents that match the specified query. + // If no query is specified, performs an update on every document in the data + // stream or index without modifying the source, which is useful for picking up + // mapping changes. 
// https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update-by-query.html UpdateByQuery core_update_by_query.NewUpdateByQuery // Changes the number of requests per second for a particular Update By Query @@ -2262,6 +3109,11 @@ func New(tp elastictransport.Interface) *API { PutAutoscalingPolicy: autoscaling_put_autoscaling_policy.NewPutAutoscalingPolicyFunc(tp), }, + // Capabilities + Capabilities: Capabilities{ + Capabilities: capabilities.NewCapabilitiesFunc(tp), + }, + // Cat Cat: Cat{ Aliases: cat_aliases.NewAliasesFunc(tp), @@ -2403,7 +3255,8 @@ func New(tp elastictransport.Interface) *API { // Esql Esql: Esql{ - Query: esql_query.NewQueryFunc(tp), + AsyncQuery: esql_async_query.NewAsyncQueryFunc(tp), + Query: esql_query.NewQueryFunc(tp), }, // Features @@ -2505,10 +3358,10 @@ func New(tp elastictransport.Interface) *API { // Inference Inference: Inference{ - DeleteModel: inference_delete_model.NewDeleteModelFunc(tp), - GetModel: inference_get_model.NewGetModelFunc(tp), - Inference: inference_inference.NewInferenceFunc(tp), - PutModel: inference_put_model.NewPutModelFunc(tp), + Delete: inference_delete.NewDeleteFunc(tp), + Get: inference_get.NewGetFunc(tp), + Inference: inference_inference.NewInferenceFunc(tp), + Put: inference_put.NewPutFunc(tp), }, // Ingest @@ -2617,6 +3470,7 @@ func New(tp elastictransport.Interface) *API { UpdateFilter: ml_update_filter.NewUpdateFilterFunc(tp), UpdateJob: ml_update_job.NewUpdateJobFunc(tp), UpdateModelSnapshot: ml_update_model_snapshot.NewUpdateModelSnapshotFunc(tp), + UpdateTrainedModelDeployment: ml_update_trained_model_deployment.NewUpdateTrainedModelDeploymentFunc(tp), UpgradeJobSnapshot: ml_upgrade_job_snapshot.NewUpgradeJobSnapshotFunc(tp), Validate: ml_validate.NewValidateFunc(tp), ValidateDetector: ml_validate_detector.NewValidateDetectorFunc(tp), @@ -2638,12 +3492,23 @@ func New(tp elastictransport.Interface) *API { Usage: nodes_usage.NewUsageFunc(tp), }, - // QueryRuleset - QueryRuleset: QueryRuleset{ - Delete: query_ruleset_delete.NewDeleteFunc(tp), - Get: query_ruleset_get.NewGetFunc(tp), - List: query_ruleset_list.NewListFunc(tp), - Put: query_ruleset_put.NewPutFunc(tp), + // Profiling + Profiling: Profiling{ + Flamegraph: profiling_flamegraph.NewFlamegraphFunc(tp), + Stacktraces: profiling_stacktraces.NewStacktracesFunc(tp), + Status: profiling_status.NewStatusFunc(tp), + TopnFunctions: profiling_topn_functions.NewTopnFunctionsFunc(tp), + }, + + // QueryRules + QueryRules: QueryRules{ + DeleteRule: query_rules_delete_rule.NewDeleteRuleFunc(tp), + DeleteRuleset: query_rules_delete_ruleset.NewDeleteRulesetFunc(tp), + GetRule: query_rules_get_rule.NewGetRuleFunc(tp), + GetRuleset: query_rules_get_ruleset.NewGetRulesetFunc(tp), + ListRulesets: query_rules_list_rulesets.NewListRulesetsFunc(tp), + PutRule: query_rules_put_rule.NewPutRuleFunc(tp), + PutRuleset: query_rules_put_ruleset.NewPutRulesetFunc(tp), }, // Rollup @@ -2728,6 +3593,7 @@ func New(tp elastictransport.Interface) *API { PutRoleMapping: security_put_role_mapping.NewPutRoleMappingFunc(tp), PutUser: security_put_user.NewPutUserFunc(tp), QueryApiKeys: security_query_api_keys.NewQueryApiKeysFunc(tp), + QueryUser: security_query_user.NewQueryUserFunc(tp), SamlAuthenticate: security_saml_authenticate.NewSamlAuthenticateFunc(tp), SamlCompleteLogout: security_saml_complete_logout.NewSamlCompleteLogoutFunc(tp), SamlInvalidate: security_saml_invalidate.NewSamlInvalidateFunc(tp), @@ -2810,13 +3676,16 @@ func New(tp elastictransport.Interface) *API { // TextStructure 
TextStructure: TextStructure{ - FindStructure: text_structure_find_structure.NewFindStructureFunc(tp), - TestGrokPattern: text_structure_test_grok_pattern.NewTestGrokPatternFunc(tp), + FindFieldStructure: text_structure_find_field_structure.NewFindFieldStructureFunc(tp), + FindMessageStructure: text_structure_find_message_structure.NewFindMessageStructureFunc(tp), + FindStructure: text_structure_find_structure.NewFindStructureFunc(tp), + TestGrokPattern: text_structure_test_grok_pattern.NewTestGrokPatternFunc(tp), }, // Transform Transform: Transform{ DeleteTransform: transform_delete_transform.NewDeleteTransformFunc(tp), + GetNodeStats: transform_get_node_stats.NewGetNodeStatsFunc(tp), GetTransform: transform_get_transform.NewGetTransformFunc(tp), GetTransformStats: transform_get_transform_stats.NewGetTransformStatsFunc(tp), PreviewTransform: transform_preview_transform.NewPreviewTransformFunc(tp), diff --git a/typedapi/asyncsearch/delete/delete.go b/typedapi/asyncsearch/delete/delete.go index 343613d9a3..c9ce522f28 100644 --- a/typedapi/asyncsearch/delete/delete.go +++ b/typedapi/asyncsearch/delete/delete.go @@ -16,10 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Deletes an async search by ID. If the search is still running, the search -// request will be cancelled. Otherwise, the saved search results are deleted. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Deletes an async search by identifier. +// If the search is still running, the search request will be cancelled. +// Otherwise, the saved search results are deleted. +// If the Elasticsearch security features are enabled, the deletion of a +// specific async search is restricted to: the authenticated user that submitted +// the original search request; users that have the `cancel_task` cluster +// privilege. package delete import ( @@ -28,9 +33,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,8 +82,13 @@ func NewDeleteFunc(tp elastictransport.Interface) NewDelete { } } -// Deletes an async search by ID. If the search is still running, the search -// request will be cancelled. Otherwise, the saved search results are deleted. +// Deletes an async search by identifier. +// If the search is still running, the search request will be cancelled. +// Otherwise, the saved search results are deleted. +// If the Elasticsearch security features are enabled, the deletion of a +// specific async search is restricted to: the authenticated user that submitted +// the original search request; users that have the `cancel_task` cluster +// privilege. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/async-search.html func New(tp elastictransport.Interface) *Delete { @@ -262,7 +272,7 @@ func (r Delete) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +308,47 @@ func (r *Delete) _id(id string) *Delete { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *Delete) ErrorTrace(errortrace bool) *Delete { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Delete) FilterPath(filterpaths ...string) *Delete { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Delete) Human(human bool) *Delete { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Delete) Pretty(pretty bool) *Delete { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/asyncsearch/delete/response.go b/typedapi/asyncsearch/delete/response.go index ec7a6dbed8..b2c8db0b10 100644 --- a/typedapi/asyncsearch/delete/response.go +++ b/typedapi/asyncsearch/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/async_search/delete/AsyncSearchDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/async_search/delete/AsyncSearchDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/asyncsearch/get/get.go b/typedapi/asyncsearch/get/get.go index f7a8170ef1..1c21b4a621 100644 --- a/typedapi/asyncsearch/get/get.go +++ b/typedapi/asyncsearch/get/get.go @@ -16,10 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves the results of a previously submitted async search request given -// its ID. +// its identifier. +// If the Elasticsearch security features are enabled, access to the results of +// a specific async search is restricted to the user or API key that submitted +// it. package get import ( @@ -28,7 +31,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -79,7 +81,10 @@ func NewGetFunc(tp elastictransport.Interface) NewGet { } // Retrieves the results of a previously submitted async search request given -// its ID. +// its identifier. 
+// If the Elasticsearch security features are enabled, access to the results of +// a specific async search is restricted to the user or API key that submitted +// it. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/async-search.html func New(tp elastictransport.Interface) *Get { @@ -265,7 +270,7 @@ func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -338,3 +343,47 @@ func (r *Get) WaitForCompletionTimeout(duration string) *Get { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Get) ErrorTrace(errortrace bool) *Get { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Get) FilterPath(filterpaths ...string) *Get { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Get) Human(human bool) *Get { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Get) Pretty(pretty bool) *Get { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/asyncsearch/get/response.go b/typedapi/asyncsearch/get/response.go index c08a7d2fdb..e0a9c2ab0f 100644 --- a/typedapi/asyncsearch/get/response.go +++ b/typedapi/asyncsearch/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/async_search/get/AsyncSearchGetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/async_search/get/AsyncSearchGetResponse.ts#L22-L24 type Response struct { // CompletionTime Indicates when the async search completed. Only present diff --git a/typedapi/asyncsearch/status/response.go b/typedapi/asyncsearch/status/response.go index f983b5b6b2..086aba55e4 100644 --- a/typedapi/asyncsearch/status/response.go +++ b/typedapi/asyncsearch/status/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package status @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package status // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/async_search/status/AsyncSearchStatusResponse.ts#L39-L41 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/async_search/status/AsyncSearchStatusResponse.ts#L39-L41 type Response struct { // Clusters_ Metadata about clusters involved in the cross-cluster search. diff --git a/typedapi/asyncsearch/status/status.go b/typedapi/asyncsearch/status/status.go index 85315ea8e1..91f14399f0 100644 --- a/typedapi/asyncsearch/status/status.go +++ b/typedapi/asyncsearch/status/status.go @@ -16,10 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed +// Get async search status // Retrieves the status of a previously submitted async search request given its -// ID. +// identifier, without retrieving search results. +// If the Elasticsearch security features are enabled, use of this API is +// restricted to the `monitoring_user` role. package status import ( @@ -28,9 +31,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,8 +80,11 @@ func NewStatusFunc(tp elastictransport.Interface) NewStatus { } } +// Get async search status // Retrieves the status of a previously submitted async search request given its -// ID. +// identifier, without retrieving search results. +// If the Elasticsearch security features are enabled, use of this API is +// restricted to the `monitoring_user` role. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/async-search.html func New(tp elastictransport.Interface) *Status { @@ -264,7 +270,7 @@ func (r Status) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +306,47 @@ func (r *Status) _id(id string) *Status { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Status) ErrorTrace(errortrace bool) *Status { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Status) FilterPath(filterpaths ...string) *Status { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. 
This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Status) Human(human bool) *Status { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Status) Pretty(pretty bool) *Status { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/asyncsearch/submit/request.go b/typedapi/asyncsearch/submit/request.go index 971e49d4ae..a6141f3708 100644 --- a/typedapi/asyncsearch/submit/request.go +++ b/typedapi/asyncsearch/submit/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package submit @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package submit // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/async_search/submit/AsyncSearchSubmitRequest.ts#L55-L286 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/async_search/submit/AsyncSearchSubmitRequest.ts#L55-L286 type Request struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Collapse *types.FieldCollapse `json:"collapse,omitempty"` @@ -57,7 +57,7 @@ type Request struct { // IndicesBoost Boosts the _score of documents from specified indices. IndicesBoost []map[string]types.Float64 `json:"indices_boost,omitempty"` // Knn Defines the approximate kNN search to run. - Knn []types.KnnQuery `json:"knn,omitempty"` + Knn []types.KnnSearch `json:"knn,omitempty"` // MinScore Minimum _score for matching documents. Documents with a lower _score are // not included in the search results. 
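The common query parameters added in this change (error_trace, filter_path, human, pretty) can be chained on any affected builder; a sketch using the async search status request shown earlier follows. The client setup and the search id are placeholders, not part of this diff.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	// Illustrative client setup; the address is an assumption.
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		log.Fatal(err)
	}

	// Use the id returned by a prior AsyncSearch.Submit call; this one is a placeholder.
	// Human and Pretty are among the newly generated common query parameters.
	status, err := es.AsyncSearch.Status("FmRldE8zREVEUzA2ZVpUeGs2ejJFUFEaTGlyUzhxVUpiUWZRN0NXX2RPUVRBQTox").
		Human(true).
		Pretty(true).
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("async search status: %+v\n", status)
}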
MinScore *types.Float64 `json:"min_score,omitempty"` @@ -131,6 +131,7 @@ func NewRequest() *Request { Ext: make(map[string]json.RawMessage, 0), ScriptFields: make(map[string]types.ScriptField, 0), } + return r } @@ -179,7 +180,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "explain": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -207,7 +208,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "from": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -235,7 +236,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { rawMsg := json.RawMessage{} dec.Decode(&rawMsg) if !bytes.HasPrefix(rawMsg, []byte("[")) { - o := types.NewKnnQuery() + o := types.NewKnnSearch() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Knn", err) } @@ -248,7 +249,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "min_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -274,7 +275,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "profile": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -327,7 +328,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "seq_no_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -342,7 +343,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -409,7 +410,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "terminate_after": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -436,7 +437,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Timeout = &o case "track_scores": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -455,7 +456,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "version": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/asyncsearch/submit/response.go b/typedapi/asyncsearch/submit/response.go index e6da63911e..9786b2b5e6 100644 --- a/typedapi/asyncsearch/submit/response.go +++ b/typedapi/asyncsearch/submit/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package submit @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package submit // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/async_search/submit/AsyncSearchSubmitResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/async_search/submit/AsyncSearchSubmitResponse.ts#L22-L24 type Response struct { // CompletionTime Indicates when the async search completed. 
Only present diff --git a/typedapi/asyncsearch/submit/submit.go b/typedapi/asyncsearch/submit/submit.go index f2b5658a3d..c7192edd98 100644 --- a/typedapi/asyncsearch/submit/submit.go +++ b/typedapi/asyncsearch/submit/submit.go @@ -16,9 +16,19 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Executes a search request asynchronously. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Runs a search request asynchronously. +// When the primary sort of the results is an indexed field, shards get sorted +// based on minimum and maximum value that they hold for that field, hence +// partial results become available following the sort criteria that was +// requested. +// Warning: Async search does not support scroll nor search requests that only +// include the suggest section. +// By default, Elasticsearch doesn’t allow you to store an async search response +// larger than 10Mb and an attempt to do this results in an error. +// The maximum allowed size for a stored async search response can be set by +// changing the `search.max_async_search_response_size` cluster level setting. package submit import ( @@ -83,7 +93,17 @@ func NewSubmitFunc(tp elastictransport.Interface) NewSubmit { } } -// Executes a search request asynchronously. +// Runs a search request asynchronously. +// When the primary sort of the results is an indexed field, shards get sorted +// based on minimum and maximum value that they hold for that field, hence +// partial results become available following the sort criteria that was +// requested. +// Warning: Async search does not support scroll nor search requests that only +// include the suggest section. +// By default, Elasticsearch doesn’t allow you to store an async search response +// larger than 10Mb and an attempt to do this results in an error. +// The maximum allowed size for a stored async search response can be set by +// changing the `search.max_async_search_response_size` cluster level setting. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/async-search.html func New(tp elastictransport.Interface) *Submit { @@ -603,6 +623,50 @@ func (r *Submit) Q(q string) *Submit { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Submit) ErrorTrace(errortrace bool) *Submit { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Submit) FilterPath(filterpaths ...string) *Submit { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *Submit) Human(human bool) *Submit { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Submit) Pretty(pretty bool) *Submit { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: aggregations func (r *Submit) Aggregations(aggregations map[string]types.Aggregations) *Submit { @@ -684,7 +748,7 @@ func (r *Submit) IndicesBoost(indicesboosts ...map[string]types.Float64) *Submit // Knn Defines the approximate kNN search to run. // API name: knn -func (r *Submit) Knn(knns ...types.KnnQuery) *Submit { +func (r *Submit) Knn(knns ...types.KnnSearch) *Submit { r.req.Knn = knns return r diff --git a/typedapi/autoscaling/deleteautoscalingpolicy/delete_autoscaling_policy.go b/typedapi/autoscaling/deleteautoscalingpolicy/delete_autoscaling_policy.go index 84b8bec9fb..acd63e799d 100644 --- a/typedapi/autoscaling/deleteautoscalingpolicy/delete_autoscaling_policy.go +++ b/typedapi/autoscaling/deleteautoscalingpolicy/delete_autoscaling_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes an autoscaling policy. Designed for indirect use by ECE/ESS and ECK. // Direct use is not supported. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r DeleteAutoscalingPolicy) IsSuccess(providedCtx context.Context) (bool, e if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +300,47 @@ func (r *DeleteAutoscalingPolicy) _name(name string) *DeleteAutoscalingPolicy { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteAutoscalingPolicy) ErrorTrace(errortrace bool) *DeleteAutoscalingPolicy { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteAutoscalingPolicy) FilterPath(filterpaths ...string) *DeleteAutoscalingPolicy { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteAutoscalingPolicy) Human(human bool) *DeleteAutoscalingPolicy { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *DeleteAutoscalingPolicy) Pretty(pretty bool) *DeleteAutoscalingPolicy { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/autoscaling/deleteautoscalingpolicy/response.go b/typedapi/autoscaling/deleteautoscalingpolicy/response.go index 9af73b792f..1e7729430a 100644 --- a/typedapi/autoscaling/deleteautoscalingpolicy/response.go +++ b/typedapi/autoscaling/deleteautoscalingpolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleteautoscalingpolicy // Response holds the response body struct for the package deleteautoscalingpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/autoscaling/delete_autoscaling_policy/DeleteAutoscalingPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/autoscaling/delete_autoscaling_policy/DeleteAutoscalingPolicyResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/autoscaling/getautoscalingcapacity/get_autoscaling_capacity.go b/typedapi/autoscaling/getautoscalingcapacity/get_autoscaling_capacity.go index 389d218978..da2021c1c0 100644 --- a/typedapi/autoscaling/getautoscalingcapacity/get_autoscaling_capacity.go +++ b/typedapi/autoscaling/getautoscalingcapacity/get_autoscaling_capacity.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Gets the current autoscaling capacity based on the configured autoscaling // policy. Designed for indirect use by ECE/ESS and ECK. Direct use is not @@ -29,9 +29,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -252,7 +252,7 @@ func (r GetAutoscalingCapacity) IsSuccess(providedCtx context.Context) (bool, er if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -279,3 +279,47 @@ func (r *GetAutoscalingCapacity) Header(key, value string) *GetAutoscalingCapaci return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetAutoscalingCapacity) ErrorTrace(errortrace bool) *GetAutoscalingCapacity { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *GetAutoscalingCapacity) FilterPath(filterpaths ...string) *GetAutoscalingCapacity { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetAutoscalingCapacity) Human(human bool) *GetAutoscalingCapacity { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetAutoscalingCapacity) Pretty(pretty bool) *GetAutoscalingCapacity { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/autoscaling/getautoscalingcapacity/response.go b/typedapi/autoscaling/getautoscalingcapacity/response.go index 91a6c206b4..bef2f5646b 100644 --- a/typedapi/autoscaling/getautoscalingcapacity/response.go +++ b/typedapi/autoscaling/getautoscalingcapacity/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getautoscalingcapacity @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getautoscalingcapacity // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L25-L29 type Response struct { Policies map[string]types.AutoscalingDeciders `json:"policies"` } diff --git a/typedapi/autoscaling/getautoscalingpolicy/get_autoscaling_policy.go b/typedapi/autoscaling/getautoscalingpolicy/get_autoscaling_policy.go index 1cf19b9dde..a59fe6d011 100644 --- a/typedapi/autoscaling/getautoscalingpolicy/get_autoscaling_policy.go +++ b/typedapi/autoscaling/getautoscalingpolicy/get_autoscaling_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves an autoscaling policy. Designed for indirect use by ECE/ESS and // ECK. Direct use is not supported. 
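A sketch of the autoscaling capacity request above with the new FilterPath and Human helpers chained on. The client construction and the "policies" filter value are illustrative assumptions; the Policies map on the response is the one declared in the generated Response struct above.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	// Illustrative client setup; the address is an assumption.
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		log.Fatal(err)
	}

	// FilterPath trims the response to the listed fields; "policies" is just an example.
	capacity, err := es.Autoscaling.GetAutoscalingCapacity().
		FilterPath("policies").
		Human(true).
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	for name := range capacity.Policies {
		fmt.Println("autoscaling policy:", name)
	}
}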
@@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r GetAutoscalingPolicy) IsSuccess(providedCtx context.Context) (bool, erro if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +300,47 @@ func (r *GetAutoscalingPolicy) _name(name string) *GetAutoscalingPolicy { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetAutoscalingPolicy) ErrorTrace(errortrace bool) *GetAutoscalingPolicy { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetAutoscalingPolicy) FilterPath(filterpaths ...string) *GetAutoscalingPolicy { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetAutoscalingPolicy) Human(human bool) *GetAutoscalingPolicy { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetAutoscalingPolicy) Pretty(pretty bool) *GetAutoscalingPolicy { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/autoscaling/getautoscalingpolicy/response.go b/typedapi/autoscaling/getautoscalingpolicy/response.go index b5398f1893..0f967b15e1 100644 --- a/typedapi/autoscaling/getautoscalingpolicy/response.go +++ b/typedapi/autoscaling/getautoscalingpolicy/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getautoscalingpolicy @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getautoscalingpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/autoscaling/get_autoscaling_policy/GetAutoscalingPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/autoscaling/get_autoscaling_policy/GetAutoscalingPolicyResponse.ts#L22-L24 type Response struct { // Deciders Decider settings diff --git a/typedapi/autoscaling/putautoscalingpolicy/put_autoscaling_policy.go b/typedapi/autoscaling/putautoscalingpolicy/put_autoscaling_policy.go index 7f9ecc0839..93836f1954 100644 --- a/typedapi/autoscaling/putautoscalingpolicy/put_autoscaling_policy.go +++ b/typedapi/autoscaling/putautoscalingpolicy/put_autoscaling_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates a new autoscaling policy. Designed for indirect use by ECE/ESS and // ECK. Direct use is not supported. @@ -31,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -92,6 +93,8 @@ func New(tp elastictransport.Interface) *PutAutoscalingPolicy { headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -314,6 +317,50 @@ func (r *PutAutoscalingPolicy) _name(name string) *PutAutoscalingPolicy { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutAutoscalingPolicy) ErrorTrace(errortrace bool) *PutAutoscalingPolicy { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutAutoscalingPolicy) FilterPath(filterpaths ...string) *PutAutoscalingPolicy { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutAutoscalingPolicy) Human(human bool) *PutAutoscalingPolicy { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *PutAutoscalingPolicy) Pretty(pretty bool) *PutAutoscalingPolicy { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Deciders Decider settings // API name: deciders func (r *PutAutoscalingPolicy) Deciders(deciders map[string]json.RawMessage) *PutAutoscalingPolicy { diff --git a/typedapi/autoscaling/putautoscalingpolicy/request.go b/typedapi/autoscaling/putautoscalingpolicy/request.go index fd987158e7..f4c62d878a 100644 --- a/typedapi/autoscaling/putautoscalingpolicy/request.go +++ b/typedapi/autoscaling/putautoscalingpolicy/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putautoscalingpolicy @@ -26,5 +26,12 @@ import ( // Request holds the request body struct for the package putautoscalingpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/autoscaling/put_autoscaling_policy/PutAutoscalingPolicyRequest.ts#L24-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/autoscaling/put_autoscaling_policy/PutAutoscalingPolicyRequest.ts#L24-L35 type Request = types.AutoscalingPolicy + +// NewRequest returns a Request +func NewRequest() *Request { + r := types.NewAutoscalingPolicy() + + return r +} diff --git a/typedapi/autoscaling/putautoscalingpolicy/response.go b/typedapi/autoscaling/putautoscalingpolicy/response.go index 723ab69d4c..fc0d0847f4 100644 --- a/typedapi/autoscaling/putautoscalingpolicy/response.go +++ b/typedapi/autoscaling/putautoscalingpolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putautoscalingpolicy // Response holds the response body struct for the package putautoscalingpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/autoscaling/put_autoscaling_policy/PutAutoscalingPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/autoscaling/put_autoscaling_policy/PutAutoscalingPolicyResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/capabilities/capabilities.go b/typedapi/capabilities/capabilities.go new file mode 100644 index 0000000000..1ae2840fca --- /dev/null +++ b/typedapi/capabilities/capabilities.go @@ -0,0 +1,225 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Checks if the specified combination of method, API, parameters, and arbitrary +// capabilities are supported +package capabilities + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type Capabilities struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewCapabilities type alias for index. +type NewCapabilities func() *Capabilities + +// NewCapabilitiesFunc returns a new instance of Capabilities with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewCapabilitiesFunc(tp elastictransport.Interface) NewCapabilities { + return func() *Capabilities { + n := New(tp) + + return n + } +} + +// Checks if the specified combination of method, API, parameters, and arbitrary +// capabilities are supported +// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/capabilities.html +func New(tp elastictransport.Interface) *Capabilities { + r := &Capabilities{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *Capabilities) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_capabilities") + + method = http.MethodGet + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
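A sketch of calling the new capabilities endpoint through the typed client. Per the Do implementation below, the call reports success as a plain boolean rather than decoding a response body; the client construction is an illustrative assumption.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"}, // illustrative address
	})
	if err != nil {
		log.Fatal(err)
	}

	// Do delegates to IsSuccess, so the result is only a success indicator.
	ok, err := es.Capabilities.Capabilities().Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("capabilities call succeeded:", ok)
}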
+func (r Capabilities) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "capabilities") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "capabilities") + if reader := instrument.RecordRequestBody(ctx, "capabilities", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "capabilities") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the Capabilities query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a capabilities.Response +func (r Capabilities) Do(ctx context.Context) (bool, error) { + return r.IsSuccess(ctx) +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r Capabilities) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "capabilities") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the Capabilities query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the Capabilities headers map. +func (r *Capabilities) Header(key, value string) *Capabilities { + r.headers.Set(key, value) + + return r +} diff --git a/typedapi/cat/aliases/aliases.go b/typedapi/cat/aliases/aliases.go index e4f5d66c26..fdd1aa38fb 100644 --- a/typedapi/cat/aliases/aliases.go +++ b/typedapi/cat/aliases/aliases.go @@ -16,10 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Shows information about currently configured aliases to indices including -// filter and routing infos. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Retrieves the cluster’s index aliases, including filter and routing +// information. +// The API does not return data stream aliases. 
+// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or the Kibana console. They are not intended for use by applications. +// For application consumption, use the aliases API. package aliases import ( @@ -28,9 +32,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,8 +80,12 @@ func NewAliasesFunc(tp elastictransport.Interface) NewAliases { } } -// Shows information about currently configured aliases to indices including -// filter and routing infos. +// Retrieves the cluster’s index aliases, including filter and routing +// information. +// The API does not return data stream aliases. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or the Kibana console. They are not intended for use by applications. +// For application consumption, use the aliases API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-alias.html func New(tp elastictransport.Interface) *Aliases { @@ -270,7 +278,7 @@ func (r Aliases) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -320,3 +328,110 @@ func (r *Aliases) ExpandWildcards(expandwildcards ...expandwildcard.ExpandWildca return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Aliases) Format(format string) *Aliases { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Aliases) H(names ...string) *Aliases { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Aliases) Help(help bool) *Aliases { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Aliases) Local(local bool) *Aliases { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Aliases) MasterTimeout(duration string) *Aliases { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Aliases) S(names ...string) *Aliases { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Aliases) V(v bool) *Aliases { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *Aliases) ErrorTrace(errortrace bool) *Aliases { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Aliases) FilterPath(filterpaths ...string) *Aliases { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Aliases) Human(human bool) *Aliases { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Aliases) Pretty(pretty bool) *Aliases { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/aliases/response.go b/typedapi/cat/aliases/response.go index e04070770f..c693a713c4 100644 --- a/typedapi/cat/aliases/response.go +++ b/typedapi/cat/aliases/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package aliases @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package aliases // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/aliases/CatAliasesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/aliases/CatAliasesResponse.ts#L22-L24 type Response []types.AliasesRecord diff --git a/typedapi/cat/allocation/allocation.go b/typedapi/cat/allocation/allocation.go index 568e09235f..abce605fbf 100644 --- a/typedapi/cat/allocation/allocation.go +++ b/typedapi/cat/allocation/allocation.go @@ -16,10 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Provides a snapshot of how many shards are allocated to each data node and -// how much disk space they are using. +// Provides a snapshot of the number of shards allocated to each data node and +// their disk space. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. 
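
The query-string helpers just added to Aliases (Format, H, Help, Local, MasterTimeout, S, V, ErrorTrace, FilterPath, Human, Pretty) chain fluently on the builder, and the same set is added to the other cat endpoints in this change. A short sketch, reusing the tp transport and ctx context assumed in the Capabilities sketch above:

res, err := aliases.New(tp).
    Format("json").      // render the columnar data as JSON
    H("alias", "index"). // return only these columns
    S("alias:asc").      // sort ascending by alias name
    V(true).             // verbose output (include column headers)
    Perform(ctx)
if err != nil {
    // handle the transport error
}
defer res.Body.Close()
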
package allocation import ( @@ -28,9 +30,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,8 +78,10 @@ func NewAllocationFunc(tp elastictransport.Interface) NewAllocation { } } -// Provides a snapshot of how many shards are allocated to each data node and -// how much disk space they are using. +// Provides a snapshot of the number of shards allocated to each data node and +// their disk space. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-allocation.html func New(tp elastictransport.Interface) *Allocation { @@ -270,7 +274,7 @@ func (r Allocation) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -315,3 +319,110 @@ func (r *Allocation) Bytes(bytes bytes.Bytes) *Allocation { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Allocation) Format(format string) *Allocation { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Allocation) H(names ...string) *Allocation { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Allocation) Help(help bool) *Allocation { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Allocation) Local(local bool) *Allocation { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Allocation) MasterTimeout(duration string) *Allocation { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Allocation) S(names ...string) *Allocation { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Allocation) V(v bool) *Allocation { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Allocation) ErrorTrace(errortrace bool) *Allocation { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Allocation) FilterPath(filterpaths ...string) *Allocation { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Allocation) Human(human bool) *Allocation { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Allocation) Pretty(pretty bool) *Allocation { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/allocation/response.go b/typedapi/cat/allocation/response.go index a0f4d08181..64309f3098 100644 --- a/typedapi/cat/allocation/response.go +++ b/typedapi/cat/allocation/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package allocation @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package allocation // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/allocation/CatAllocationResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/allocation/CatAllocationResponse.ts#L22-L24 type Response []types.AllocationRecord diff --git a/typedapi/cat/componenttemplates/component_templates.go b/typedapi/cat/componenttemplates/component_templates.go index 0eb68d95a9..d26674b71d 100644 --- a/typedapi/cat/componenttemplates/component_templates.go +++ b/typedapi/cat/componenttemplates/component_templates.go @@ -16,9 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Returns information about existing component_templates templates. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Returns information about component templates in a cluster. +// Component templates are building blocks for constructing index templates that +// specify index mappings, settings, and aliases. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. +// They are not intended for use by applications. For application consumption, +// use the get component template API. package componenttemplates import ( @@ -27,9 +33,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -74,7 +80,13 @@ func NewComponentTemplatesFunc(tp elastictransport.Interface) NewComponentTempla } } -// Returns information about existing component_templates templates. 
+// Returns information about component templates in a cluster. +// Component templates are building blocks for constructing index templates that +// specify index mappings, settings, and aliases. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. +// They are not intended for use by applications. For application consumption, +// use the get component template API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-component-templates.html func New(tp elastictransport.Interface) *ComponentTemplates { @@ -267,7 +279,7 @@ func (r ComponentTemplates) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -304,3 +316,110 @@ func (r *ComponentTemplates) Name(name string) *ComponentTemplates { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *ComponentTemplates) Format(format string) *ComponentTemplates { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *ComponentTemplates) H(names ...string) *ComponentTemplates { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *ComponentTemplates) Help(help bool) *ComponentTemplates { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *ComponentTemplates) Local(local bool) *ComponentTemplates { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *ComponentTemplates) MasterTimeout(duration string) *ComponentTemplates { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *ComponentTemplates) S(names ...string) *ComponentTemplates { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *ComponentTemplates) V(v bool) *ComponentTemplates { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ComponentTemplates) ErrorTrace(errortrace bool) *ComponentTemplates { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *ComponentTemplates) FilterPath(filterpaths ...string) *ComponentTemplates { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ComponentTemplates) Human(human bool) *ComponentTemplates { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ComponentTemplates) Pretty(pretty bool) *ComponentTemplates { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/componenttemplates/response.go b/typedapi/cat/componenttemplates/response.go index d6f66a3a97..71c136ba9c 100644 --- a/typedapi/cat/componenttemplates/response.go +++ b/typedapi/cat/componenttemplates/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package componenttemplates @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package componenttemplates // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/component_templates/CatComponentTemplatesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/component_templates/CatComponentTemplatesResponse.ts#L22-L24 type Response []types.CatComponentTemplate diff --git a/typedapi/cat/count/count.go b/typedapi/cat/count/count.go index da27df3d81..0f99bf6a4d 100644 --- a/typedapi/cat/count/count.go +++ b/typedapi/cat/count/count.go @@ -16,10 +16,16 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Provides quick access to the document count of the entire cluster, or -// individual indices. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Provides quick access to a document count for a data stream, an index, or an +// entire cluster. +// NOTE: The document count only includes live documents, not deleted documents +// which have not yet been removed by the merge process. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. +// They are not intended for use by applications. For application consumption, +// use the count API. 
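
As the new description above notes, cat count can target a data stream, an index, or the whole cluster, and it only counts live documents. A sketch in the same style, with tp and ctx as before and a hypothetical index name:

res, err := count.New(tp).
    Index("my-index"). // hypothetical target; omit it to count across the whole cluster
    Format("json").
    Perform(ctx)
if err != nil {
    // handle the transport error
}
defer res.Body.Close()
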
package count import ( @@ -28,9 +34,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -75,8 +81,14 @@ func NewCountFunc(tp elastictransport.Interface) NewCount { } } -// Provides quick access to the document count of the entire cluster, or -// individual indices. +// Provides quick access to a document count for a data stream, an index, or an +// entire cluster. +// NOTE: The document count only includes live documents, not deleted documents +// which have not yet been removed by the merge process. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. +// They are not intended for use by applications. For application consumption, +// use the count API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-count.html func New(tp elastictransport.Interface) *Count { @@ -269,7 +281,7 @@ func (r Count) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -308,3 +320,110 @@ func (r *Count) Index(index string) *Count { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Count) Format(format string) *Count { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Count) H(names ...string) *Count { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Count) Help(help bool) *Count { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Count) Local(local bool) *Count { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Count) MasterTimeout(duration string) *Count { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Count) S(names ...string) *Count { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Count) V(v bool) *Count { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Count) ErrorTrace(errortrace bool) *Count { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Count) FilterPath(filterpaths ...string) *Count { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Count) Human(human bool) *Count { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Count) Pretty(pretty bool) *Count { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/count/response.go b/typedapi/cat/count/response.go index 6e6f6b6efc..0a37182ed0 100644 --- a/typedapi/cat/count/response.go +++ b/typedapi/cat/count/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package count @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package count // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/count/CatCountResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/count/CatCountResponse.ts#L22-L24 type Response []types.CountRecord diff --git a/typedapi/cat/fielddata/fielddata.go b/typedapi/cat/fielddata/fielddata.go index 3ae5ee86df..0696466a03 100644 --- a/typedapi/cat/fielddata/fielddata.go +++ b/typedapi/cat/fielddata/fielddata.go @@ -16,10 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Shows how much heap memory is currently being used by fielddata on every data -// node in the cluster. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Returns the amount of heap memory currently used by the field data cache on +// every data node in the cluster. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. +// They are not intended for use by applications. For application consumption, +// use the nodes stats API. package fielddata import ( @@ -28,9 +32,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,8 +80,12 @@ func NewFielddataFunc(tp elastictransport.Interface) NewFielddata { } } -// Shows how much heap memory is currently being used by fielddata on every data -// node in the cluster. +// Returns the amount of heap memory currently used by the field data cache on +// every data node in the cluster. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. 
+// They are not intended for use by applications. For application consumption, +// use the nodes stats API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-fielddata.html func New(tp elastictransport.Interface) *Fielddata { @@ -270,7 +278,7 @@ func (r Fielddata) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -315,3 +323,110 @@ func (r *Fielddata) Bytes(bytes bytes.Bytes) *Fielddata { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Fielddata) Format(format string) *Fielddata { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Fielddata) H(names ...string) *Fielddata { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Fielddata) Help(help bool) *Fielddata { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Fielddata) Local(local bool) *Fielddata { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Fielddata) MasterTimeout(duration string) *Fielddata { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Fielddata) S(names ...string) *Fielddata { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Fielddata) V(v bool) *Fielddata { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Fielddata) ErrorTrace(errortrace bool) *Fielddata { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Fielddata) FilterPath(filterpaths ...string) *Fielddata { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *Fielddata) Human(human bool) *Fielddata { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Fielddata) Pretty(pretty bool) *Fielddata { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/fielddata/response.go b/typedapi/cat/fielddata/response.go index d580a682e7..75072918f5 100644 --- a/typedapi/cat/fielddata/response.go +++ b/typedapi/cat/fielddata/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package fielddata @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package fielddata // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/fielddata/CatFielddataResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/fielddata/CatFielddataResponse.ts#L22-L24 type Response []types.FielddataRecord diff --git a/typedapi/cat/health/health.go b/typedapi/cat/health/health.go index 0cef73a670..c3fcdf4991 100644 --- a/typedapi/cat/health/health.go +++ b/typedapi/cat/health/health.go @@ -16,9 +16,23 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Returns a concise representation of the cluster health. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Returns the health status of a cluster, similar to the cluster health API. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. +// They are not intended for use by applications. For application consumption, +// use the cluster health API. +// This API is often used to check malfunctioning clusters. +// To help you track cluster health alongside log files and alerting systems, +// the API returns timestamps in two formats: +// `HH:MM:SS`, which is human-readable but includes no date information; +// `Unix epoch time`, which is machine-sortable and includes date information. +// The latter format is useful for cluster recoveries that take multiple days. +// You can use the cat health API to verify cluster health across multiple +// nodes. +// You also can use the API to track the recovery of a large cluster over a +// longer period of time. package health import ( @@ -27,7 +41,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -70,7 +83,21 @@ func NewHealthFunc(tp elastictransport.Interface) NewHealth { } } -// Returns a concise representation of the cluster health. +// Returns the health status of a cluster, similar to the cluster health API. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. +// They are not intended for use by applications. For application consumption, +// use the cluster health API. +// This API is often used to check malfunctioning clusters. 
+// To help you track cluster health alongside log files and alerting systems, +// the API returns timestamps in two formats: +// `HH:MM:SS`, which is human-readable but includes no date information; +// `Unix epoch time`, which is machine-sortable and includes date information. +// The latter format is useful for cluster recoveries that take multiple days. +// You can use the cat health API to verify cluster health across multiple +// nodes. +// You also can use the API to track the recovery of a large cluster over a +// longer period of time. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-health.html func New(tp elastictransport.Interface) *Health { @@ -250,7 +277,7 @@ func (r Health) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -293,3 +320,110 @@ func (r *Health) Ts(ts bool) *Health { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Health) Format(format string) *Health { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Health) H(names ...string) *Health { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Health) Help(help bool) *Health { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Health) Local(local bool) *Health { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Health) MasterTimeout(duration string) *Health { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Health) S(names ...string) *Health { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Health) V(v bool) *Health { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Health) ErrorTrace(errortrace bool) *Health { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Health) FilterPath(filterpaths ...string) *Health { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Health) Human(human bool) *Health { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Health) Pretty(pretty bool) *Health { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/health/response.go b/typedapi/cat/health/response.go index 69d741a51e..98935fbd63 100644 --- a/typedapi/cat/health/response.go +++ b/typedapi/cat/health/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package health @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package health // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/health/CatHealthResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/health/CatHealthResponse.ts#L22-L24 type Response []types.HealthRecord diff --git a/typedapi/cat/help/help.go b/typedapi/cat/help/help.go index ce8decac0a..a85b4bbb1a 100644 --- a/typedapi/cat/help/help.go +++ b/typedapi/cat/help/help.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns help for the Cat APIs. package help @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -246,7 +246,7 @@ func (r Help) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -273,3 +273,110 @@ func (r *Help) Header(key, value string) *Help { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Help) Format(format string) *Help { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Help) H(names ...string) *Help { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. 
This option +// can't be combined with any other query string option. +// API name: help +func (r *Help) Help(help bool) *Help { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Help) Local(local bool) *Help { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Help) MasterTimeout(duration string) *Help { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Help) S(names ...string) *Help { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Help) V(v bool) *Help { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Help) ErrorTrace(errortrace bool) *Help { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Help) FilterPath(filterpaths ...string) *Help { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Help) Human(human bool) *Help { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Help) Pretty(pretty bool) *Help { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/help/response.go b/typedapi/cat/help/response.go index d0ba3ed286..684c9d0c09 100644 --- a/typedapi/cat/help/response.go +++ b/typedapi/cat/help/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package help @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package help // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/help/CatHelpResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/help/CatHelpResponse.ts#L22-L24 type Response []types.HelpRecord diff --git a/typedapi/cat/indices/indices.go b/typedapi/cat/indices/indices.go index 717a429364..6f46ab8440 100644 --- a/typedapi/cat/indices/indices.go +++ b/typedapi/cat/indices/indices.go @@ -16,10 +16,22 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Returns information about indices: number of primaries and replicas, document -// counts, disk size, ... +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Returns high-level information about indices in a cluster, including backing +// indices for data streams. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. +// They are not intended for use by applications. For application consumption, +// use the get index API. +// Use the cat indices API to get the following information for each index in a +// cluster: shard count; document count; deleted document count; primary store +// size; total store size of all shards, including shard replicas. +// These metrics are retrieved directly from Lucene, which Elasticsearch uses +// internally to power indexing and search. As a result, all document counts +// include hidden nested documents. +// To get an accurate count of Elasticsearch documents, use the cat count or +// count APIs. package indices import ( @@ -28,7 +40,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -80,8 +91,20 @@ func NewIndicesFunc(tp elastictransport.Interface) NewIndices { } } -// Returns information about indices: number of primaries and replicas, document -// counts, disk size, ... +// Returns high-level information about indices in a cluster, including backing +// indices for data streams. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. +// They are not intended for use by applications. For application consumption, +// use the get index API. +// Use the cat indices API to get the following information for each index in a +// cluster: shard count; document count; deleted document count; primary store +// size; total store size of all shards, including shard replicas. +// These metrics are retrieved directly from Lucene, which Elasticsearch uses +// internally to power indexing and search. As a result, all document counts +// include hidden nested documents. +// To get an accurate count of Elasticsearch documents, use the cat count or +// count APIs. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-indices.html func New(tp elastictransport.Interface) *Indices { @@ -274,7 +297,7 @@ func (r Indices) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -367,3 +390,110 @@ func (r *Indices) Time(time timeunit.TimeUnit) *Indices { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Indices) Format(format string) *Indices { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Indices) H(names ...string) *Indices { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Indices) Help(help bool) *Indices { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Indices) Local(local bool) *Indices { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Indices) MasterTimeout(duration string) *Indices { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Indices) S(names ...string) *Indices { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Indices) V(v bool) *Indices { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Indices) ErrorTrace(errortrace bool) *Indices { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Indices) FilterPath(filterpaths ...string) *Indices { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Indices) Human(human bool) *Indices { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". 
Only use +// this option for debugging only. +// API name: pretty +func (r *Indices) Pretty(pretty bool) *Indices { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/indices/response.go b/typedapi/cat/indices/response.go index f87aaf0ec6..1376fc4337 100644 --- a/typedapi/cat/indices/response.go +++ b/typedapi/cat/indices/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package indices @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package indices // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/indices/CatIndicesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/indices/CatIndicesResponse.ts#L22-L24 type Response []types.IndicesRecord diff --git a/typedapi/cat/master/master.go b/typedapi/cat/master/master.go index 492fb19593..3fe56cb231 100644 --- a/typedapi/cat/master/master.go +++ b/typedapi/cat/master/master.go @@ -16,9 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about the master node. +// Returns information about the master node, including the ID, bound IP +// address, and name. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the nodes info API. package master import ( @@ -27,9 +31,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -68,7 +72,11 @@ func NewMasterFunc(tp elastictransport.Interface) NewMaster { } } -// Returns information about the master node. +// Returns information about the master node, including the ID, bound IP +// address, and name. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the nodes info API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-master.html func New(tp elastictransport.Interface) *Master { @@ -248,7 +256,7 @@ func (r Master) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +283,110 @@ func (r *Master) Header(key, value string) *Master { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Master) Format(format string) *Master { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. 
+// API name: h +func (r *Master) H(names ...string) *Master { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Master) Help(help bool) *Master { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Master) Local(local bool) *Master { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Master) MasterTimeout(duration string) *Master { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Master) S(names ...string) *Master { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Master) V(v bool) *Master { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Master) ErrorTrace(errortrace bool) *Master { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Master) FilterPath(filterpaths ...string) *Master { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Master) Human(human bool) *Master { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Master) Pretty(pretty bool) *Master { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/master/response.go b/typedapi/cat/master/response.go index 48a0296476..3a6b525185 100644 --- a/typedapi/cat/master/response.go +++ b/typedapi/cat/master/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package master @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package master // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/master/CatMasterResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/master/CatMasterResponse.ts#L22-L24 type Response []types.MasterRecord diff --git a/typedapi/cat/mldatafeeds/ml_datafeeds.go b/typedapi/cat/mldatafeeds/ml_datafeeds.go index c63cbe09ad..73c107c367 100644 --- a/typedapi/cat/mldatafeeds/ml_datafeeds.go +++ b/typedapi/cat/mldatafeeds/ml_datafeeds.go @@ -16,9 +16,17 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Gets configuration and usage information about datafeeds. +// Returns configuration and usage information about datafeeds. +// This API returns a maximum of 10,000 datafeeds. +// If the Elasticsearch security features are enabled, you must have +// `monitor_ml`, `monitor`, `manage_ml`, or `manage` +// cluster privileges to use this API. +// +// IMPORTANT: cat APIs are only intended for human consumption using the Kibana +// console or command line. They are not intended for use by applications. For +// application consumption, use the get datafeed statistics API. package mldatafeeds import ( @@ -27,7 +35,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -77,7 +84,15 @@ func NewMlDatafeedsFunc(tp elastictransport.Interface) NewMlDatafeeds { } } -// Gets configuration and usage information about datafeeds. +// Returns configuration and usage information about datafeeds. +// This API returns a maximum of 10,000 datafeeds. +// If the Elasticsearch security features are enabled, you must have +// `monitor_ml`, `monitor`, `manage_ml`, or `manage` +// cluster privileges to use this API. +// +// IMPORTANT: cat APIs are only intended for human consumption using the Kibana +// console or command line. They are not intended for use by applications. For +// application consumption, use the get datafeed statistics API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-datafeeds.html func New(tp elastictransport.Interface) *MlDatafeeds { @@ -274,7 +289,7 @@ func (r MlDatafeeds) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -361,3 +376,92 @@ func (r *MlDatafeeds) Time(time timeunit.TimeUnit) *MlDatafeeds { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *MlDatafeeds) Format(format string) *MlDatafeeds { + r.values.Set("format", format) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. 
+// API name: help +func (r *MlDatafeeds) Help(help bool) *MlDatafeeds { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *MlDatafeeds) Local(local bool) *MlDatafeeds { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *MlDatafeeds) MasterTimeout(duration string) *MlDatafeeds { + r.values.Set("master_timeout", duration) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *MlDatafeeds) V(v bool) *MlDatafeeds { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *MlDatafeeds) ErrorTrace(errortrace bool) *MlDatafeeds { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *MlDatafeeds) FilterPath(filterpaths ...string) *MlDatafeeds { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *MlDatafeeds) Human(human bool) *MlDatafeeds { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *MlDatafeeds) Pretty(pretty bool) *MlDatafeeds { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/mldatafeeds/response.go b/typedapi/cat/mldatafeeds/response.go index 812f9c3f34..9a89eb6114 100644 --- a/typedapi/cat/mldatafeeds/response.go +++ b/typedapi/cat/mldatafeeds/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package mldatafeeds @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mldatafeeds // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/ml_datafeeds/CatDatafeedsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/ml_datafeeds/CatDatafeedsResponse.ts#L22-L24 type Response []types.DatafeedsRecord diff --git a/typedapi/cat/mldataframeanalytics/ml_data_frame_analytics.go b/typedapi/cat/mldataframeanalytics/ml_data_frame_analytics.go index 71c0acaaba..c1b44ba2c8 100644 --- a/typedapi/cat/mldataframeanalytics/ml_data_frame_analytics.go +++ b/typedapi/cat/mldataframeanalytics/ml_data_frame_analytics.go @@ -16,9 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Gets configuration and usage information about data frame analytics jobs. +// Returns configuration and usage information about data frame analytics jobs. +// +// IMPORTANT: cat APIs are only intended for human consumption using the Kibana +// console or command line. They are not intended for use by applications. For +// application consumption, use the get data frame analytics jobs statistics +// API. package mldataframeanalytics import ( @@ -27,7 +32,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -77,7 +81,12 @@ func NewMlDataFrameAnalyticsFunc(tp elastictransport.Interface) NewMlDataFrameAn } } -// Gets configuration and usage information about data frame analytics jobs. +// Returns configuration and usage information about data frame analytics jobs. +// +// IMPORTANT: cat APIs are only intended for human consumption using the Kibana +// console or command line. They are not intended for use by applications. For +// application consumption, use the get data frame analytics jobs statistics +// API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-dfanalytics.html func New(tp elastictransport.Interface) *MlDataFrameAnalytics { @@ -278,7 +287,7 @@ func (r MlDataFrameAnalytics) IsSuccess(providedCtx context.Context) (bool, erro if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -364,3 +373,92 @@ func (r *MlDataFrameAnalytics) Time(duration string) *MlDataFrameAnalytics { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *MlDataFrameAnalytics) Format(format string) *MlDataFrameAnalytics { + r.values.Set("format", format) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. 
+// API name: help +func (r *MlDataFrameAnalytics) Help(help bool) *MlDataFrameAnalytics { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *MlDataFrameAnalytics) Local(local bool) *MlDataFrameAnalytics { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *MlDataFrameAnalytics) MasterTimeout(duration string) *MlDataFrameAnalytics { + r.values.Set("master_timeout", duration) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *MlDataFrameAnalytics) V(v bool) *MlDataFrameAnalytics { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *MlDataFrameAnalytics) ErrorTrace(errortrace bool) *MlDataFrameAnalytics { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *MlDataFrameAnalytics) FilterPath(filterpaths ...string) *MlDataFrameAnalytics { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *MlDataFrameAnalytics) Human(human bool) *MlDataFrameAnalytics { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *MlDataFrameAnalytics) Pretty(pretty bool) *MlDataFrameAnalytics { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/mldataframeanalytics/response.go b/typedapi/cat/mldataframeanalytics/response.go index 121419ba94..c51cdba9e2 100644 --- a/typedapi/cat/mldataframeanalytics/response.go +++ b/typedapi/cat/mldataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package mldataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mldataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/ml_data_frame_analytics/CatDataFrameAnalyticsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/ml_data_frame_analytics/CatDataFrameAnalyticsResponse.ts#L22-L24 type Response []types.DataFrameAnalyticsRecord diff --git a/typedapi/cat/mljobs/ml_jobs.go b/typedapi/cat/mljobs/ml_jobs.go index cb1e2de9cd..111652f3cc 100644 --- a/typedapi/cat/mljobs/ml_jobs.go +++ b/typedapi/cat/mljobs/ml_jobs.go @@ -16,9 +16,17 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Gets configuration and usage information about anomaly detection jobs. +// Returns configuration and usage information for anomaly detection jobs. +// This API returns a maximum of 10,000 jobs. +// If the Elasticsearch security features are enabled, you must have +// `monitor_ml`, +// `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. +// +// IMPORTANT: cat APIs are only intended for human consumption using the Kibana +// console or command line. They are not intended for use by applications. For +// application consumption, use the get anomaly detection job statistics API. package mljobs import ( @@ -27,7 +35,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -78,7 +85,15 @@ func NewMlJobsFunc(tp elastictransport.Interface) NewMlJobs { } } -// Gets configuration and usage information about anomaly detection jobs. +// Returns configuration and usage information for anomaly detection jobs. +// This API returns a maximum of 10,000 jobs. +// If the Elasticsearch security features are enabled, you must have +// `monitor_ml`, +// `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. +// +// IMPORTANT: cat APIs are only intended for human consumption using the Kibana +// console or command line. They are not intended for use by applications. For +// application consumption, use the get anomaly detection job statistics API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-anomaly-detectors.html func New(tp elastictransport.Interface) *MlJobs { @@ -275,7 +290,7 @@ func (r MlJobs) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -370,3 +385,92 @@ func (r *MlJobs) Time(time timeunit.TimeUnit) *MlJobs { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *MlJobs) Format(format string) *MlJobs { + r.values.Set("format", format) + + return r +} + +// Help When set to `true` will output available columns. 
This option +// can't be combined with any other query string option. +// API name: help +func (r *MlJobs) Help(help bool) *MlJobs { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *MlJobs) Local(local bool) *MlJobs { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *MlJobs) MasterTimeout(duration string) *MlJobs { + r.values.Set("master_timeout", duration) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *MlJobs) V(v bool) *MlJobs { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *MlJobs) ErrorTrace(errortrace bool) *MlJobs { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *MlJobs) FilterPath(filterpaths ...string) *MlJobs { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *MlJobs) Human(human bool) *MlJobs { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *MlJobs) Pretty(pretty bool) *MlJobs { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/mljobs/response.go b/typedapi/cat/mljobs/response.go index 635a14113c..5695396e2c 100644 --- a/typedapi/cat/mljobs/response.go +++ b/typedapi/cat/mljobs/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package mljobs @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mljobs // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/ml_jobs/CatJobsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/ml_jobs/CatJobsResponse.ts#L22-L24 type Response []types.JobsRecord diff --git a/typedapi/cat/mltrainedmodels/ml_trained_models.go b/typedapi/cat/mltrainedmodels/ml_trained_models.go index 1c42887ecc..aaa8358400 100644 --- a/typedapi/cat/mltrainedmodels/ml_trained_models.go +++ b/typedapi/cat/mltrainedmodels/ml_trained_models.go @@ -16,9 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Gets configuration and usage information about inference trained models. +// Returns configuration and usage information about inference trained models. +// +// IMPORTANT: cat APIs are only intended for human consumption using the Kibana +// console or command line. They are not intended for use by applications. For +// application consumption, use the get trained models statistics API. package mltrainedmodels import ( @@ -27,7 +31,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -77,7 +80,11 @@ func NewMlTrainedModelsFunc(tp elastictransport.Interface) NewMlTrainedModels { } } -// Gets configuration and usage information about inference trained models. +// Returns configuration and usage information about inference trained models. +// +// IMPORTANT: cat APIs are only intended for human consumption using the Kibana +// console or command line. They are not intended for use by applications. For +// application consumption, use the get trained models statistics API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-trained-model.html func New(tp elastictransport.Interface) *MlTrainedModels { @@ -274,7 +281,7 @@ func (r MlTrainedModels) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -373,3 +380,92 @@ func (r *MlTrainedModels) Size(size int) *MlTrainedModels { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *MlTrainedModels) Format(format string) *MlTrainedModels { + r.values.Set("format", format) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *MlTrainedModels) Help(help bool) *MlTrainedModels { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. 
In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *MlTrainedModels) Local(local bool) *MlTrainedModels { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *MlTrainedModels) MasterTimeout(duration string) *MlTrainedModels { + r.values.Set("master_timeout", duration) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *MlTrainedModels) V(v bool) *MlTrainedModels { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *MlTrainedModels) ErrorTrace(errortrace bool) *MlTrainedModels { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *MlTrainedModels) FilterPath(filterpaths ...string) *MlTrainedModels { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *MlTrainedModels) Human(human bool) *MlTrainedModels { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *MlTrainedModels) Pretty(pretty bool) *MlTrainedModels { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/mltrainedmodels/response.go b/typedapi/cat/mltrainedmodels/response.go index d15b961ae4..0e4dd59ecf 100644 --- a/typedapi/cat/mltrainedmodels/response.go +++ b/typedapi/cat/mltrainedmodels/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package mltrainedmodels @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mltrainedmodels // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/ml_trained_models/CatTrainedModelsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/ml_trained_models/CatTrainedModelsResponse.ts#L22-L24 type Response []types.TrainedModelsRecord diff --git a/typedapi/cat/nodeattrs/nodeattrs.go b/typedapi/cat/nodeattrs/nodeattrs.go index 7ece51f382..698471053a 100644 --- a/typedapi/cat/nodeattrs/nodeattrs.go +++ b/typedapi/cat/nodeattrs/nodeattrs.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns information about custom node attributes. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the nodes info API. package nodeattrs import ( @@ -27,9 +30,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -69,6 +72,9 @@ func NewNodeattrsFunc(tp elastictransport.Interface) NewNodeattrs { } // Returns information about custom node attributes. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the nodes info API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-nodeattrs.html func New(tp elastictransport.Interface) *Nodeattrs { @@ -248,7 +254,7 @@ func (r Nodeattrs) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +281,110 @@ func (r *Nodeattrs) Header(key, value string) *Nodeattrs { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Nodeattrs) Format(format string) *Nodeattrs { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Nodeattrs) H(names ...string) *Nodeattrs { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Nodeattrs) Help(help bool) *Nodeattrs { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Nodeattrs) Local(local bool) *Nodeattrs { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Nodeattrs) MasterTimeout(duration string) *Nodeattrs { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Nodeattrs) S(names ...string) *Nodeattrs { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Nodeattrs) V(v bool) *Nodeattrs { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *Nodeattrs) ErrorTrace(errortrace bool) *Nodeattrs { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Nodeattrs) FilterPath(filterpaths ...string) *Nodeattrs { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Nodeattrs) Human(human bool) *Nodeattrs { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Nodeattrs) Pretty(pretty bool) *Nodeattrs { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/nodeattrs/response.go b/typedapi/cat/nodeattrs/response.go index 4d477c8725..007d1284a8 100644 --- a/typedapi/cat/nodeattrs/response.go +++ b/typedapi/cat/nodeattrs/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package nodeattrs @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package nodeattrs // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/nodeattrs/CatNodeAttributesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/nodeattrs/CatNodeAttributesResponse.ts#L22-L24 type Response []types.NodeAttributesRecord diff --git a/typedapi/cat/nodes/nodes.go b/typedapi/cat/nodes/nodes.go index 48e1df0735..155c0ece24 100644 --- a/typedapi/cat/nodes/nodes.go +++ b/typedapi/cat/nodes/nodes.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns basic statistics about performance of cluster nodes. +// Returns information about the nodes in a cluster. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the nodes info API. package nodes import ( @@ -27,7 +30,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -70,7 +72,10 @@ func NewNodesFunc(tp elastictransport.Interface) NewNodes { } } -// Returns basic statistics about performance of cluster nodes. +// Returns information about the nodes in a cluster. 
+// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the nodes info API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-nodes.html func New(tp elastictransport.Interface) *Nodes { @@ -250,7 +255,7 @@ func (r Nodes) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -302,3 +307,110 @@ func (r *Nodes) IncludeUnloadedSegments(includeunloadedsegments bool) *Nodes { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Nodes) Format(format string) *Nodes { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Nodes) H(names ...string) *Nodes { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Nodes) Help(help bool) *Nodes { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Nodes) Local(local bool) *Nodes { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Nodes) MasterTimeout(duration string) *Nodes { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Nodes) S(names ...string) *Nodes { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Nodes) V(v bool) *Nodes { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Nodes) ErrorTrace(errortrace bool) *Nodes { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Nodes) FilterPath(filterpaths ...string) *Nodes { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *Nodes) Human(human bool) *Nodes { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Nodes) Pretty(pretty bool) *Nodes { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/nodes/response.go b/typedapi/cat/nodes/response.go index a8465fe194..8e95bf2f28 100644 --- a/typedapi/cat/nodes/response.go +++ b/typedapi/cat/nodes/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package nodes @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package nodes // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/nodes/CatNodesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/nodes/CatNodesResponse.ts#L22-L24 type Response []types.NodesRecord diff --git a/typedapi/cat/pendingtasks/pending_tasks.go b/typedapi/cat/pendingtasks/pending_tasks.go index 1880dbfe80..98ebcddad3 100644 --- a/typedapi/cat/pendingtasks/pending_tasks.go +++ b/typedapi/cat/pendingtasks/pending_tasks.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns a concise representation of the cluster pending tasks. +// Returns cluster-level changes that have not yet been executed. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the pending cluster tasks API. package pendingtasks import ( @@ -27,9 +30,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -68,7 +71,10 @@ func NewPendingTasksFunc(tp elastictransport.Interface) NewPendingTasks { } } -// Returns a concise representation of the cluster pending tasks. +// Returns cluster-level changes that have not yet been executed. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the pending cluster tasks API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-pending-tasks.html func New(tp elastictransport.Interface) *PendingTasks { @@ -248,7 +254,7 @@ func (r PendingTasks) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +281,110 @@ func (r *PendingTasks) Header(key, value string) *PendingTasks { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. 
+// API name: format +func (r *PendingTasks) Format(format string) *PendingTasks { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *PendingTasks) H(names ...string) *PendingTasks { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *PendingTasks) Help(help bool) *PendingTasks { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *PendingTasks) Local(local bool) *PendingTasks { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *PendingTasks) MasterTimeout(duration string) *PendingTasks { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *PendingTasks) S(names ...string) *PendingTasks { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *PendingTasks) V(v bool) *PendingTasks { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PendingTasks) ErrorTrace(errortrace bool) *PendingTasks { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PendingTasks) FilterPath(filterpaths ...string) *PendingTasks { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PendingTasks) Human(human bool) *PendingTasks { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PendingTasks) Pretty(pretty bool) *PendingTasks { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/pendingtasks/response.go b/typedapi/cat/pendingtasks/response.go index 4d77552e1b..c7352bfde7 100644 --- a/typedapi/cat/pendingtasks/response.go +++ b/typedapi/cat/pendingtasks/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package pendingtasks @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package pendingtasks // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/pending_tasks/CatPendingTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/pending_tasks/CatPendingTasksResponse.ts#L22-L24 type Response []types.PendingTasksRecord diff --git a/typedapi/cat/plugins/plugins.go b/typedapi/cat/plugins/plugins.go index 30802756a0..a7029ddad9 100644 --- a/typedapi/cat/plugins/plugins.go +++ b/typedapi/cat/plugins/plugins.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about installed plugins across nodes node. +// Returns a list of plugins running on each node of a cluster. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the nodes info API. package plugins import ( @@ -27,9 +30,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -68,7 +71,10 @@ func NewPluginsFunc(tp elastictransport.Interface) NewPlugins { } } -// Returns information about installed plugins across nodes node. +// Returns a list of plugins running on each node of a cluster. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the nodes info API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-plugins.html func New(tp elastictransport.Interface) *Plugins { @@ -248,7 +254,7 @@ func (r Plugins) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +281,110 @@ func (r *Plugins) Header(key, value string) *Plugins { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Plugins) Format(format string) *Plugins { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Plugins) H(names ...string) *Plugins { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. 
+// API name: help +func (r *Plugins) Help(help bool) *Plugins { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Plugins) Local(local bool) *Plugins { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Plugins) MasterTimeout(duration string) *Plugins { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Plugins) S(names ...string) *Plugins { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Plugins) V(v bool) *Plugins { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Plugins) ErrorTrace(errortrace bool) *Plugins { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Plugins) FilterPath(filterpaths ...string) *Plugins { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Plugins) Human(human bool) *Plugins { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Plugins) Pretty(pretty bool) *Plugins { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/plugins/response.go b/typedapi/cat/plugins/response.go index 02c8404d7e..63100f54d2 100644 --- a/typedapi/cat/plugins/response.go +++ b/typedapi/cat/plugins/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package plugins @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package plugins // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/plugins/CatPluginsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/plugins/CatPluginsResponse.ts#L22-L24 type Response []types.PluginsRecord diff --git a/typedapi/cat/recovery/recovery.go b/typedapi/cat/recovery/recovery.go index bca4595741..58b202ef8a 100644 --- a/typedapi/cat/recovery/recovery.go +++ b/typedapi/cat/recovery/recovery.go @@ -16,9 +16,18 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Returns information about index shard recoveries, both on-going completed. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Returns information about ongoing and completed shard recoveries. +// Shard recovery is the process of initializing a shard copy, such as restoring +// a primary shard from a snapshot or syncing a replica shard from a primary +// shard. When a shard recovery completes, the recovered shard is available for +// search and indexing. +// For data streams, the API returns information about the stream’s backing +// indices. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the index recovery API. package recovery import ( @@ -27,7 +36,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,7 +84,16 @@ func NewRecoveryFunc(tp elastictransport.Interface) NewRecovery { } } -// Returns information about index shard recoveries, both on-going completed. +// Returns information about ongoing and completed shard recoveries. +// Shard recovery is the process of initializing a shard copy, such as restoring +// a primary shard from a snapshot or syncing a replica shard from a primary +// shard. When a shard recovery completes, the recovered shard is available for +// search and indexing. +// For data streams, the API returns information about the stream’s backing +// indices. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the index recovery API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-recovery.html func New(tp elastictransport.Interface) *Recovery { @@ -269,7 +286,7 @@ func (r Recovery) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -332,3 +349,110 @@ func (r *Recovery) Detailed(detailed bool) *Recovery { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. 
+// API name: format +func (r *Recovery) Format(format string) *Recovery { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Recovery) H(names ...string) *Recovery { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Recovery) Help(help bool) *Recovery { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Recovery) Local(local bool) *Recovery { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Recovery) MasterTimeout(duration string) *Recovery { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Recovery) S(names ...string) *Recovery { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Recovery) V(v bool) *Recovery { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Recovery) ErrorTrace(errortrace bool) *Recovery { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Recovery) FilterPath(filterpaths ...string) *Recovery { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Recovery) Human(human bool) *Recovery { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Recovery) Pretty(pretty bool) *Recovery { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/recovery/response.go b/typedapi/cat/recovery/response.go index 6baac57370..202552ef3f 100644 --- a/typedapi/cat/recovery/response.go +++ b/typedapi/cat/recovery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package recovery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package recovery // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/recovery/CatRecoveryResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/recovery/CatRecoveryResponse.ts#L22-L24 type Response []types.RecoveryRecord diff --git a/typedapi/cat/repositories/repositories.go b/typedapi/cat/repositories/repositories.go index fd85c85c7d..55b9b86a04 100644 --- a/typedapi/cat/repositories/repositories.go +++ b/typedapi/cat/repositories/repositories.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about snapshot repositories registered in the cluster. +// Returns the snapshot repositories for a cluster. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the get snapshot repository API. package repositories import ( @@ -27,9 +30,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -68,7 +71,10 @@ func NewRepositoriesFunc(tp elastictransport.Interface) NewRepositories { } } -// Returns information about snapshot repositories registered in the cluster. +// Returns the snapshot repositories for a cluster. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the get snapshot repository API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-repositories.html func New(tp elastictransport.Interface) *Repositories { @@ -248,7 +254,7 @@ func (r Repositories) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +281,110 @@ func (r *Repositories) Header(key, value string) *Repositories { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Repositories) Format(format string) *Repositories { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Repositories) H(names ...string) *Repositories { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. 
+// API name: help +func (r *Repositories) Help(help bool) *Repositories { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Repositories) Local(local bool) *Repositories { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Repositories) MasterTimeout(duration string) *Repositories { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Repositories) S(names ...string) *Repositories { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Repositories) V(v bool) *Repositories { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Repositories) ErrorTrace(errortrace bool) *Repositories { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Repositories) FilterPath(filterpaths ...string) *Repositories { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Repositories) Human(human bool) *Repositories { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Repositories) Pretty(pretty bool) *Repositories { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/repositories/response.go b/typedapi/cat/repositories/response.go index 7e17c4b362..8e842ced76 100644 --- a/typedapi/cat/repositories/response.go +++ b/typedapi/cat/repositories/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
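Besides the new options, each regenerated IsSuccess helper now drains the response body with io.Copy(io.Discard, res.Body) instead of using the deprecated io/ioutil package. The drain-and-close pattern itself is plain standard-library Go; the sketch below uses a hypothetical URL and only illustrates why the body is discarded before closing: it lets the keep-alive connection be returned to the pool.

package main

import (
	"io"
	"log"
	"net/http"
)

func main() {
	// Hypothetical endpoint; only the drain-and-close pattern matters here.
	res, err := http.Get("http://localhost:9200/_cat/health")
	if err != nil {
		log.Fatal(err)
	}
	defer res.Body.Close()

	// io.Discard (Go 1.16+) replaces ioutil.Discard; draining the body
	// allows the underlying HTTP connection to be reused.
	if _, err := io.Copy(io.Discard, res.Body); err != nil {
		log.Fatal(err)
	}

	log.Printf("status: %s", res.Status)
}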
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package repositories @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package repositories // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/repositories/CatRepositoriesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/repositories/CatRepositoriesResponse.ts#L22-L24 type Response []types.RepositoriesRecord diff --git a/typedapi/cat/segments/response.go b/typedapi/cat/segments/response.go index f21bf54814..3c3d2d8eb6 100644 --- a/typedapi/cat/segments/response.go +++ b/typedapi/cat/segments/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package segments @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package segments // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/segments/CatSegmentsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/segments/CatSegmentsResponse.ts#L22-L24 type Response []types.SegmentsRecord diff --git a/typedapi/cat/segments/segments.go b/typedapi/cat/segments/segments.go index 2825ca14ac..6fadd9190d 100644 --- a/typedapi/cat/segments/segments.go +++ b/typedapi/cat/segments/segments.go @@ -16,9 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Provides low-level information about the segments in the shards of an index. +// Returns low-level information about the Lucene segments in index shards. +// For data streams, the API returns information about the backing indices. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the index segments API. package segments import ( @@ -27,9 +31,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -75,7 +79,11 @@ func NewSegmentsFunc(tp elastictransport.Interface) NewSegments { } } -// Provides low-level information about the segments in the shards of an index. +// Returns low-level information about the Lucene segments in index shards. +// For data streams, the API returns information about the backing indices. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the index segments API. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-segments.html func New(tp elastictransport.Interface) *Segments { @@ -268,7 +276,7 @@ func (r Segments) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -316,3 +324,110 @@ func (r *Segments) Bytes(bytes bytes.Bytes) *Segments { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Segments) Format(format string) *Segments { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Segments) H(names ...string) *Segments { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Segments) Help(help bool) *Segments { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Segments) Local(local bool) *Segments { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Segments) MasterTimeout(duration string) *Segments { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Segments) S(names ...string) *Segments { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Segments) V(v bool) *Segments { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Segments) ErrorTrace(errortrace bool) *Segments { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Segments) FilterPath(filterpaths ...string) *Segments { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *Segments) Human(human bool) *Segments { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Segments) Pretty(pretty bool) *Segments { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/shards/response.go b/typedapi/cat/shards/response.go index 43ed544b45..3085fc6626 100644 --- a/typedapi/cat/shards/response.go +++ b/typedapi/cat/shards/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package shards @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package shards // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/shards/CatShardsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/shards/CatShardsResponse.ts#L22-L24 type Response []types.ShardsRecord diff --git a/typedapi/cat/shards/shards.go b/typedapi/cat/shards/shards.go index c2d2c9de0e..eded40b56e 100644 --- a/typedapi/cat/shards/shards.go +++ b/typedapi/cat/shards/shards.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Provides a detailed view of shard allocation on nodes. +// Returns information about the shards in a cluster. +// For data streams, the API returns information about the backing indices. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. package shards import ( @@ -27,9 +30,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -75,7 +78,10 @@ func NewShardsFunc(tp elastictransport.Interface) NewShards { } } -// Provides a detailed view of shard allocation on nodes. +// Returns information about the shards in a cluster. +// For data streams, the API returns information about the backing indices. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-shards.html func New(tp elastictransport.Interface) *Shards { @@ -268,7 +274,7 @@ func (r Shards) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -316,3 +322,110 @@ func (r *Shards) Bytes(bytes bytes.Bytes) *Shards { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. 
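For the segments endpoint, the new column and sort options make it easy to surface the largest Lucene segments. A rough sketch, assuming the es.Cat.Segments() accessor and column names ("index", "shard", "segment", "size") taken from the cat segments reference rather than from this diff:

package catexamples

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// PrintLargestSegments lists segments sorted by on-disk size, largest first.
func PrintLargestSegments(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Cat.Segments().
		Format("json").
		H("index", "shard", "segment", "size").
		S("size:desc").
		Do(ctx)
	if err != nil {
		return err
	}
	out, _ := json.MarshalIndent(res, "", "  ")
	fmt.Println(string(out))
	return nil
}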
+// API name: format +func (r *Shards) Format(format string) *Shards { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Shards) H(names ...string) *Shards { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Shards) Help(help bool) *Shards { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Shards) Local(local bool) *Shards { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Shards) MasterTimeout(duration string) *Shards { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Shards) S(names ...string) *Shards { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Shards) V(v bool) *Shards { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Shards) ErrorTrace(errortrace bool) *Shards { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Shards) FilterPath(filterpaths ...string) *Shards { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Shards) Human(human bool) *Shards { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Shards) Pretty(pretty bool) *Shards { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/snapshots/response.go b/typedapi/cat/snapshots/response.go index 9166de9ce5..0c1e39c8f6 100644 --- a/typedapi/cat/snapshots/response.go +++ b/typedapi/cat/snapshots/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
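The shards builder gains the same options; a sketch that sorts shard copies by on-disk store size is below. The es.Cat.Shards() accessor and the column names are assumptions taken from the typed client layout and the cat shards reference, not from this diff.

package catexamples

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// PrintShardsByStore lists shard copies ordered by store size, largest first.
func PrintShardsByStore(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Cat.Shards().
		Format("json").
		H("index", "shard", "prirep", "state", "store", "node").
		S("store:desc").
		Do(ctx)
	if err != nil {
		return err
	}
	out, _ := json.MarshalIndent(res, "", "  ")
	fmt.Println(string(out))
	return nil
}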
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package snapshots @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package snapshots // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/snapshots/CatSnapshotsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/snapshots/CatSnapshotsResponse.ts#L22-L24 type Response []types.SnapshotsRecord diff --git a/typedapi/cat/snapshots/snapshots.go b/typedapi/cat/snapshots/snapshots.go index 5c6d65b9a7..081f00c063 100644 --- a/typedapi/cat/snapshots/snapshots.go +++ b/typedapi/cat/snapshots/snapshots.go @@ -16,9 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns all snapshots in a specific repository. +// Returns information about the snapshots stored in one or more repositories. +// A snapshot is a backup of an index or running Elasticsearch cluster. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the get snapshot API. package snapshots import ( @@ -27,7 +31,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -75,7 +78,11 @@ func NewSnapshotsFunc(tp elastictransport.Interface) NewSnapshots { } } -// Returns all snapshots in a specific repository. +// Returns information about the snapshots stored in one or more repositories. +// A snapshot is a backup of an index or running Elasticsearch cluster. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the get snapshot API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-snapshots.html func New(tp elastictransport.Interface) *Snapshots { @@ -268,7 +275,7 @@ func (r Snapshots) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -316,3 +323,110 @@ func (r *Snapshots) IgnoreUnavailable(ignoreunavailable bool) *Snapshots { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Snapshots) Format(format string) *Snapshots { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Snapshots) H(names ...string) *Snapshots { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. 
+// API name: help +func (r *Snapshots) Help(help bool) *Snapshots { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Snapshots) Local(local bool) *Snapshots { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Snapshots) MasterTimeout(duration string) *Snapshots { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Snapshots) S(names ...string) *Snapshots { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Snapshots) V(v bool) *Snapshots { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Snapshots) ErrorTrace(errortrace bool) *Snapshots { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Snapshots) FilterPath(filterpaths ...string) *Snapshots { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Snapshots) Human(human bool) *Snapshots { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Snapshots) Pretty(pretty bool) *Snapshots { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/tasks/response.go b/typedapi/cat/tasks/response.go index 6577040a36..cb92f52b4e 100644 --- a/typedapi/cat/tasks/response.go +++ b/typedapi/cat/tasks/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
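Combined with the existing IgnoreUnavailable option, the new setters let a caller list recent snapshots. This is a sketch only: the es.Cat.Snapshots() accessor and the column names ("id", "repository", "status", "start_epoch", "duration") come from the cat snapshots reference, not from this diff.

package catexamples

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// PrintRecentSnapshots lists snapshots, newest first, skipping unavailable ones.
func PrintRecentSnapshots(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Cat.Snapshots().
		IgnoreUnavailable(true).
		Format("json").
		H("id", "repository", "status", "duration").
		S("start_epoch:desc").
		Do(ctx)
	if err != nil {
		return err
	}
	out, _ := json.MarshalIndent(res, "", "  ")
	fmt.Println(string(out))
	return nil
}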
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package tasks @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package tasks // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/tasks/CatTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/tasks/CatTasksResponse.ts#L22-L24 type Response []types.TasksRecord diff --git a/typedapi/cat/tasks/tasks.go b/typedapi/cat/tasks/tasks.go index c7a6ccf3e7..c20c20a524 100644 --- a/typedapi/cat/tasks/tasks.go +++ b/typedapi/cat/tasks/tasks.go @@ -16,10 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about the tasks currently executing on one or more nodes -// in the cluster. +// Returns information about tasks currently executing in the cluster. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the task management API. package tasks import ( @@ -28,7 +30,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -70,8 +71,10 @@ func NewTasksFunc(tp elastictransport.Interface) NewTasks { } } -// Returns information about the tasks currently executing on one or more nodes -// in the cluster. +// Returns information about tasks currently executing in the cluster. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the task management API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html func New(tp elastictransport.Interface) *Tasks { @@ -251,7 +254,7 @@ func (r Tasks) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -318,3 +321,110 @@ func (r *Tasks) ParentTaskId(parenttaskid string) *Tasks { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Tasks) Format(format string) *Tasks { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Tasks) H(names ...string) *Tasks { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Tasks) Help(help bool) *Tasks { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. 
In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Tasks) Local(local bool) *Tasks { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Tasks) MasterTimeout(duration string) *Tasks { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Tasks) S(names ...string) *Tasks { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Tasks) V(v bool) *Tasks { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Tasks) ErrorTrace(errortrace bool) *Tasks { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Tasks) FilterPath(filterpaths ...string) *Tasks { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Tasks) Human(human bool) *Tasks { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Tasks) Pretty(pretty bool) *Tasks { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/templates/response.go b/typedapi/cat/templates/response.go index 3b9960cd62..b525ac51fa 100644 --- a/typedapi/cat/templates/response.go +++ b/typedapi/cat/templates/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
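The cat tasks builder follows the same pattern; the sketch below lists running tasks with a reduced column set. The accessor and the column names ("action", "task_id", "running_time", "node") are assumptions based on the cat tasks reference rather than this diff.

package catexamples

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// PrintRunningTasks dumps the currently executing tasks as JSON.
func PrintRunningTasks(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Cat.Tasks().
		Format("json").
		H("action", "task_id", "running_time", "node").
		V(true).
		Do(ctx)
	if err != nil {
		return err
	}
	out, _ := json.MarshalIndent(res, "", "  ")
	fmt.Println(string(out))
	return nil
}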
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package templates @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package templates // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/templates/CatTemplatesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/templates/CatTemplatesResponse.ts#L22-L24 type Response []types.TemplatesRecord diff --git a/typedapi/cat/templates/templates.go b/typedapi/cat/templates/templates.go index f35cec9579..eefca7e6ac 100644 --- a/typedapi/cat/templates/templates.go +++ b/typedapi/cat/templates/templates.go @@ -16,9 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Returns information about existing templates. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Returns information about index templates in a cluster. +// You can use index templates to apply index settings and field mappings to new +// indices at creation. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the get index template API. package templates import ( @@ -27,9 +32,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -74,7 +79,12 @@ func NewTemplatesFunc(tp elastictransport.Interface) NewTemplates { } } -// Returns information about existing templates. +// Returns information about index templates in a cluster. +// You can use index templates to apply index settings and field mappings to new +// indices at creation. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the get index template API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-templates.html func New(tp elastictransport.Interface) *Templates { @@ -267,7 +277,7 @@ func (r Templates) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -304,3 +314,110 @@ func (r *Templates) Name(name string) *Templates { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Templates) Format(format string) *Templates { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. +// API name: h +func (r *Templates) H(names ...string) *Templates { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. 
+// API name: help +func (r *Templates) Help(help bool) *Templates { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *Templates) Local(local bool) *Templates { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Templates) MasterTimeout(duration string) *Templates { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *Templates) S(names ...string) *Templates { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Templates) V(v bool) *Templates { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Templates) ErrorTrace(errortrace bool) *Templates { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Templates) FilterPath(filterpaths ...string) *Templates { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Templates) Human(human bool) *Templates { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Templates) Pretty(pretty bool) *Templates { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/threadpool/response.go b/typedapi/cat/threadpool/response.go index 76409549b1..0cc1deb5b2 100644 --- a/typedapi/cat/threadpool/response.go +++ b/typedapi/cat/threadpool/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
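The templates builder keeps its existing Name filter, so the new options can be combined with a name pattern. A sketch with a hypothetical "logs-*" pattern and column names assumed from the cat templates reference:

package catexamples

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// PrintLogTemplates lists index templates whose name matches a pattern.
func PrintLogTemplates(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Cat.Templates().
		Name("logs-*"). // hypothetical name pattern
		Format("json").
		H("name", "index_patterns", "order", "version").
		S("name").
		Do(ctx)
	if err != nil {
		return err
	}
	out, _ := json.MarshalIndent(res, "", "  ")
	fmt.Println(string(out))
	return nil
}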
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package threadpool @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package threadpool // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/thread_pool/CatThreadPoolResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/thread_pool/CatThreadPoolResponse.ts#L22-L24 type Response []types.ThreadPoolRecord diff --git a/typedapi/cat/threadpool/thread_pool.go b/typedapi/cat/threadpool/thread_pool.go index b9528d75bd..b7511058ee 100644 --- a/typedapi/cat/threadpool/thread_pool.go +++ b/typedapi/cat/threadpool/thread_pool.go @@ -16,11 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Returns cluster-wide thread pool statistics per node. -// By default the active, queue and rejected statistics are returned for all -// thread pools. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Returns thread pool statistics for each node in a cluster. +// Returned information includes all built-in thread pools and custom thread +// pools. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the nodes info API. package threadpool import ( @@ -29,9 +32,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,9 +80,12 @@ func NewThreadPoolFunc(tp elastictransport.Interface) NewThreadPool { } } -// Returns cluster-wide thread pool statistics per node. -// By default the active, queue and rejected statistics are returned for all -// thread pools. +// Returns thread pool statistics for each node in a cluster. +// Returned information includes all built-in thread pools and custom thread +// pools. +// IMPORTANT: cat APIs are only intended for human consumption using the command +// line or Kibana console. They are not intended for use by applications. For +// application consumption, use the nodes info API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-thread-pool.html func New(tp elastictransport.Interface) *ThreadPool { @@ -272,7 +278,7 @@ func (r ThreadPool) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -317,3 +323,110 @@ func (r *ThreadPool) Time(time timeunit.TimeUnit) *ThreadPool { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *ThreadPool) Format(format string) *ThreadPool { + r.values.Set("format", format) + + return r +} + +// H List of columns to appear in the response. Supports simple wildcards. 
+// API name: h +func (r *ThreadPool) H(names ...string) *ThreadPool { + r.values.Set("h", strings.Join(names, ",")) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *ThreadPool) Help(help bool) *ThreadPool { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. +// API name: local +func (r *ThreadPool) Local(local bool) *ThreadPool { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *ThreadPool) MasterTimeout(duration string) *ThreadPool { + r.values.Set("master_timeout", duration) + + return r +} + +// S List of columns that determine how the table should be sorted. +// Sorting defaults to ascending and can be changed by setting `:asc` +// or `:desc` as a suffix to the column name. +// API name: s +func (r *ThreadPool) S(names ...string) *ThreadPool { + r.values.Set("s", strings.Join(names, ",")) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *ThreadPool) V(v bool) *ThreadPool { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ThreadPool) ErrorTrace(errortrace bool) *ThreadPool { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ThreadPool) FilterPath(filterpaths ...string) *ThreadPool { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ThreadPool) Human(human bool) *ThreadPool { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ThreadPool) Pretty(pretty bool) *ThreadPool { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cat/transforms/response.go b/typedapi/cat/transforms/response.go index 337f0ef9a8..ba60297ba2 100644 --- a/typedapi/cat/transforms/response.go +++ b/typedapi/cat/transforms/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
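A typical use of the thread pool endpoint is spotting pools that reject work. A sketch, assuming the es.Cat.ThreadPool() accessor and the standard cat thread_pool column names, neither of which appears in this diff:

package catexamples

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// PrintRejectingThreadPools lists thread pools ordered by rejected task count.
func PrintRejectingThreadPools(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Cat.ThreadPool().
		Format("json").
		H("node_name", "name", "active", "queue", "rejected").
		S("rejected:desc").
		Do(ctx)
	if err != nil {
		return err
	}
	out, _ := json.MarshalIndent(res, "", "  ")
	fmt.Println(string(out))
	return nil
}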
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package transforms @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package transforms // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/transforms/CatTransformsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/transforms/CatTransformsResponse.ts#L22-L24 type Response []types.TransformsRecord diff --git a/typedapi/cat/transforms/transforms.go b/typedapi/cat/transforms/transforms.go index 934b679308..8861a36cdc 100644 --- a/typedapi/cat/transforms/transforms.go +++ b/typedapi/cat/transforms/transforms.go @@ -16,9 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Gets configuration and usage information about transforms. +// Returns configuration and usage information about transforms. +// +// IMPORTANT: cat APIs are only intended for human consumption using the Kibana +// console or command line. They are not intended for use by applications. For +// application consumption, use the get transform statistics API. package transforms import ( @@ -27,7 +31,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -77,7 +80,11 @@ func NewTransformsFunc(tp elastictransport.Interface) NewTransforms { } } -// Gets configuration and usage information about transforms. +// Returns configuration and usage information about transforms. +// +// IMPORTANT: cat APIs are only intended for human consumption using the Kibana +// console or command line. They are not intended for use by applications. For +// application consumption, use the get transform statistics API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cat-transforms.html func New(tp elastictransport.Interface) *Transforms { @@ -270,7 +277,7 @@ func (r Transforms) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -372,3 +379,92 @@ func (r *Transforms) Size(size int) *Transforms { return r } + +// Format Specifies the format to return the columnar data in, can be set to +// `text`, `json`, `cbor`, `yaml`, or `smile`. +// API name: format +func (r *Transforms) Format(format string) *Transforms { + r.values.Set("format", format) + + return r +} + +// Help When set to `true` will output available columns. This option +// can't be combined with any other query string option. +// API name: help +func (r *Transforms) Help(help bool) *Transforms { + r.values.Set("help", strconv.FormatBool(help)) + + return r +} + +// Local If `true`, the request computes the list of selected nodes from the +// local cluster state. If `false` the list of selected nodes are computed +// from the cluster state of the master node. In both cases the coordinating +// node will send requests for further information to each selected node. 
+// API name: local +func (r *Transforms) Local(local bool) *Transforms { + r.values.Set("local", strconv.FormatBool(local)) + + return r +} + +// MasterTimeout Period to wait for a connection to the master node. +// API name: master_timeout +func (r *Transforms) MasterTimeout(duration string) *Transforms { + r.values.Set("master_timeout", duration) + + return r +} + +// V When set to `true` will enable verbose output. +// API name: v +func (r *Transforms) V(v bool) *Transforms { + r.values.Set("v", strconv.FormatBool(v)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Transforms) ErrorTrace(errortrace bool) *Transforms { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Transforms) FilterPath(filterpaths ...string) *Transforms { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Transforms) Human(human bool) *Transforms { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Transforms) Pretty(pretty bool) *Transforms { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ccr/deleteautofollowpattern/delete_auto_follow_pattern.go b/typedapi/ccr/deleteautofollowpattern/delete_auto_follow_pattern.go index 54499e2cd2..e091b730b8 100644 --- a/typedapi/ccr/deleteautofollowpattern/delete_auto_follow_pattern.go +++ b/typedapi/ccr/deleteautofollowpattern/delete_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes auto-follow patterns. package deleteautofollowpattern @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -262,7 +262,7 @@ func (r DeleteAutoFollowPattern) IsSuccess(providedCtx context.Context) (bool, e if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +298,47 @@ func (r *DeleteAutoFollowPattern) _name(name string) *DeleteAutoFollowPattern { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
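The transforms builder already exposes paging through its Size option; together with the new Format and V setters a caller might dump the first page of transforms as JSON. The es.Cat.Transforms() accessor is assumed as before.

package catexamples

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// PrintTransforms dumps up to 100 transforms as JSON.
func PrintTransforms(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Cat.Transforms().
		Format("json").
		Size(100).
		V(true).
		Do(ctx)
	if err != nil {
		return err
	}
	out, _ := json.MarshalIndent(res, "", "  ")
	fmt.Println(string(out))
	return nil
}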
+// API name: error_trace +func (r *DeleteAutoFollowPattern) ErrorTrace(errortrace bool) *DeleteAutoFollowPattern { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteAutoFollowPattern) FilterPath(filterpaths ...string) *DeleteAutoFollowPattern { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteAutoFollowPattern) Human(human bool) *DeleteAutoFollowPattern { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteAutoFollowPattern) Pretty(pretty bool) *DeleteAutoFollowPattern { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ccr/deleteautofollowpattern/response.go b/typedapi/ccr/deleteautofollowpattern/response.go index 7f62ec1cd2..8a169c657a 100644 --- a/typedapi/ccr/deleteautofollowpattern/response.go +++ b/typedapi/ccr/deleteautofollowpattern/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleteautofollowpattern // Response holds the response body struct for the package deleteautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/delete_auto_follow_pattern/DeleteAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/delete_auto_follow_pattern/DeleteAutoFollowPatternResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/follow/follow.go b/typedapi/ccr/follow/follow.go index 78e74d1a38..e48f94f752 100644 --- a/typedapi/ccr/follow/follow.go +++ b/typedapi/ccr/follow/follow.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates a new follower index configured to follow the referenced leader // index. @@ -31,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -327,6 +328,50 @@ func (r *Follow) WaitForActiveShards(waitforactiveshards string) *Follow { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
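The ErrorTrace, FilterPath, Human, and Pretty setters added throughout this change are the shared request options, shown here on the delete auto-follow pattern endpoint. In the sketch, the es.Ccr.DeleteAutoFollowPattern accessor and the pattern name are assumptions; the Acknowledged response field is the one documented in the generated response above.

package ccrexamples

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// DeletePattern removes an auto-follow pattern, keeping only the
// acknowledged flag in the response body via filter_path.
func DeletePattern(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Ccr.DeleteAutoFollowPattern("my-auto-follow-pattern"). // hypothetical pattern name
		Pretty(true).
		FilterPath("acknowledged").
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("acknowledged:", res.Acknowledged)
	return nil
}

FilterPath trims the response on the server side, which matters mostly for large responses; here it simply keeps the example focused on the acknowledgement.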
+// API name: error_trace +func (r *Follow) ErrorTrace(errortrace bool) *Follow { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Follow) FilterPath(filterpaths ...string) *Follow { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Follow) Human(human bool) *Follow { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Follow) Pretty(pretty bool) *Follow { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: leader_index func (r *Follow) LeaderIndex(indexname string) *Follow { r.req.LeaderIndex = &indexname diff --git a/typedapi/ccr/follow/request.go b/typedapi/ccr/follow/request.go index 0acf53dfa9..27c56f7bad 100644 --- a/typedapi/ccr/follow/request.go +++ b/typedapi/ccr/follow/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package follow @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package follow // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/follow/CreateFollowIndexRequest.ts#L25-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/follow/CreateFollowIndexRequest.ts#L25-L51 type Request struct { LeaderIndex *string `json:"leader_index,omitempty"` MaxOutstandingReadRequests *int64 `json:"max_outstanding_read_requests,omitempty"` @@ -52,6 +52,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -87,7 +88,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "max_outstanding_read_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -102,7 +103,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "max_outstanding_write_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +118,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "max_read_request_operation_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -149,7 +150,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "max_write_buffer_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -176,7 +177,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.MaxWriteBufferSize = &o case 
"max_write_request_operation_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ccr/follow/response.go b/typedapi/ccr/follow/response.go index f45ac71b50..bfaa6c5700 100644 --- a/typedapi/ccr/follow/response.go +++ b/typedapi/ccr/follow/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package follow // Response holds the response body struct for the package follow // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/follow/CreateFollowIndexResponse.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/follow/CreateFollowIndexResponse.ts#L20-L26 type Response struct { FollowIndexCreated bool `json:"follow_index_created"` FollowIndexShardsAcked bool `json:"follow_index_shards_acked"` diff --git a/typedapi/ccr/followinfo/follow_info.go b/typedapi/ccr/followinfo/follow_info.go index 2fdbc04f94..a5e5de2fb5 100644 --- a/typedapi/ccr/followinfo/follow_info.go +++ b/typedapi/ccr/followinfo/follow_info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves information about all follower indices, including parameters and // status for each follower index @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r FollowInfo) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -301,3 +301,47 @@ func (r *FollowInfo) _index(index string) *FollowInfo { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *FollowInfo) ErrorTrace(errortrace bool) *FollowInfo { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *FollowInfo) FilterPath(filterpaths ...string) *FollowInfo { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *FollowInfo) Human(human bool) *FollowInfo { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *FollowInfo) Pretty(pretty bool) *FollowInfo { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ccr/followinfo/response.go b/typedapi/ccr/followinfo/response.go index 4ab60f52d8..26835b9bd7 100644 --- a/typedapi/ccr/followinfo/response.go +++ b/typedapi/ccr/followinfo/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package followinfo @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package followinfo // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/follow_info/FollowInfoResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/follow_info/FollowInfoResponse.ts#L22-L24 type Response struct { FollowerIndices []types.FollowerIndex `json:"follower_indices"` } diff --git a/typedapi/ccr/followstats/follow_stats.go b/typedapi/ccr/followstats/follow_stats.go index aaf2ef5971..b7ed93773a 100644 --- a/typedapi/ccr/followstats/follow_stats.go +++ b/typedapi/ccr/followstats/follow_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves follower stats. return shard-level stats about the following tasks // associated with each shard for the specified indices. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r FollowStats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -301,3 +301,47 @@ func (r *FollowStats) _index(index string) *FollowStats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *FollowStats) ErrorTrace(errortrace bool) *FollowStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *FollowStats) FilterPath(filterpaths ...string) *FollowStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. 
When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *FollowStats) Human(human bool) *FollowStats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *FollowStats) Pretty(pretty bool) *FollowStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ccr/followstats/response.go b/typedapi/ccr/followstats/response.go index 65eb420637..e26f15568c 100644 --- a/typedapi/ccr/followstats/response.go +++ b/typedapi/ccr/followstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package followstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package followstats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/follow_stats/FollowIndexStatsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/follow_stats/FollowIndexStatsResponse.ts#L22-L24 type Response struct { Indices []types.FollowIndexStats `json:"indices"` } diff --git a/typedapi/ccr/forgetfollower/forget_follower.go b/typedapi/ccr/forgetfollower/forget_follower.go index 90e3b70798..69f9423a43 100644 --- a/typedapi/ccr/forgetfollower/forget_follower.go +++ b/typedapi/ccr/forgetfollower/forget_follower.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Removes the follower retention leases from the leader. package forgetfollower @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -315,6 +316,50 @@ func (r *ForgetFollower) _index(index string) *ForgetFollower { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ForgetFollower) ErrorTrace(errortrace bool) *ForgetFollower { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ForgetFollower) FilterPath(filterpaths ...string) *ForgetFollower { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *ForgetFollower) Human(human bool) *ForgetFollower { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ForgetFollower) Pretty(pretty bool) *ForgetFollower { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: follower_cluster func (r *ForgetFollower) FollowerCluster(followercluster string) *ForgetFollower { diff --git a/typedapi/ccr/forgetfollower/request.go b/typedapi/ccr/forgetfollower/request.go index 5b903df06d..f727ae2f49 100644 --- a/typedapi/ccr/forgetfollower/request.go +++ b/typedapi/ccr/forgetfollower/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package forgetfollower @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package forgetfollower // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/forget_follower/ForgetFollowerIndexRequest.ts#L23-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/forget_follower/ForgetFollowerIndexRequest.ts#L23-L38 type Request struct { FollowerCluster *string `json:"follower_cluster,omitempty"` FollowerIndex *string `json:"follower_index,omitempty"` @@ -42,6 +42,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ccr/forgetfollower/response.go b/typedapi/ccr/forgetfollower/response.go index 12e19853bc..5e979a8089 100644 --- a/typedapi/ccr/forgetfollower/response.go +++ b/typedapi/ccr/forgetfollower/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package forgetfollower @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package forgetfollower // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/forget_follower/ForgetFollowerIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/forget_follower/ForgetFollowerIndexResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` } diff --git a/typedapi/ccr/getautofollowpattern/get_auto_follow_pattern.go b/typedapi/ccr/getautofollowpattern/get_auto_follow_pattern.go index a9b39283dd..0b451f40e7 100644 --- a/typedapi/ccr/getautofollowpattern/get_auto_follow_pattern.go +++ b/typedapi/ccr/getautofollowpattern/get_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Gets configured auto-follow patterns. Returns the specified auto-follow // pattern collection. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -269,7 +269,7 @@ func (r GetAutoFollowPattern) IsSuccess(providedCtx context.Context) (bool, erro if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -306,3 +306,47 @@ func (r *GetAutoFollowPattern) Name(name string) *GetAutoFollowPattern { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetAutoFollowPattern) ErrorTrace(errortrace bool) *GetAutoFollowPattern { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetAutoFollowPattern) FilterPath(filterpaths ...string) *GetAutoFollowPattern { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetAutoFollowPattern) Human(human bool) *GetAutoFollowPattern { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetAutoFollowPattern) Pretty(pretty bool) *GetAutoFollowPattern { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ccr/getautofollowpattern/response.go b/typedapi/ccr/getautofollowpattern/response.go index 9a3a635487..1bf134d3c6 100644 --- a/typedapi/ccr/getautofollowpattern/response.go +++ b/typedapi/ccr/getautofollowpattern/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getautofollowpattern @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/get_auto_follow_pattern/GetAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/get_auto_follow_pattern/GetAutoFollowPatternResponse.ts#L22-L24 type Response struct { Patterns []types.AutoFollowPattern `json:"patterns"` } diff --git a/typedapi/ccr/pauseautofollowpattern/pause_auto_follow_pattern.go b/typedapi/ccr/pauseautofollowpattern/pause_auto_follow_pattern.go index a008501244..26b3af3b0c 100644 --- a/typedapi/ccr/pauseautofollowpattern/pause_auto_follow_pattern.go +++ b/typedapi/ccr/pauseautofollowpattern/pause_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Pauses an auto-follow pattern package pauseautofollowpattern @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r PauseAutoFollowPattern) IsSuccess(providedCtx context.Context) (bool, er if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -301,3 +301,47 @@ func (r *PauseAutoFollowPattern) _name(name string) *PauseAutoFollowPattern { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PauseAutoFollowPattern) ErrorTrace(errortrace bool) *PauseAutoFollowPattern { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PauseAutoFollowPattern) FilterPath(filterpaths ...string) *PauseAutoFollowPattern { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PauseAutoFollowPattern) Human(human bool) *PauseAutoFollowPattern { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
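// Editorial note, not part of the generated patch: besides the new query
// helpers, the generated IsSuccess methods in this change replace the
// deprecated ioutil.Discard with io.Discard and drop the io/ioutil import
// (io/ioutil has been deprecated since Go 1.16). The underlying idiom, in
// isolation — drain the body so the connection can be reused, then close it:
package examples

import (
	"io"
	"net/http"
)

// drainAndClose mirrors what the generated IsSuccess helpers do with a
// response body they do not need to read.
func drainAndClose(res *http.Response) error {
	if res == nil || res.Body == nil {
		return nil
	}
	// io.Discard is a Writer whose Write always succeeds without storing
	// anything; it is a drop-in replacement for ioutil.Discard.
	if _, err := io.Copy(io.Discard, res.Body); err != nil {
		return err
	}
	return res.Body.Close()
}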
+// API name: pretty +func (r *PauseAutoFollowPattern) Pretty(pretty bool) *PauseAutoFollowPattern { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ccr/pauseautofollowpattern/response.go b/typedapi/ccr/pauseautofollowpattern/response.go index bce1e33fb8..381dbefa54 100644 --- a/typedapi/ccr/pauseautofollowpattern/response.go +++ b/typedapi/ccr/pauseautofollowpattern/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package pauseautofollowpattern // Response holds the response body struct for the package pauseautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/pause_auto_follow_pattern/PauseAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/pause_auto_follow_pattern/PauseAutoFollowPatternResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/pausefollow/pause_follow.go b/typedapi/ccr/pausefollow/pause_follow.go index c0e979ffe1..85a0acbc66 100644 --- a/typedapi/ccr/pausefollow/pause_follow.go +++ b/typedapi/ccr/pausefollow/pause_follow.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Pauses a follower index. The follower index will not fetch any additional // operations from the leader index. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r PauseFollow) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +300,47 @@ func (r *PauseFollow) _index(index string) *PauseFollow { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PauseFollow) ErrorTrace(errortrace bool) *PauseFollow { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PauseFollow) FilterPath(filterpaths ...string) *PauseFollow { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *PauseFollow) Human(human bool) *PauseFollow { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PauseFollow) Pretty(pretty bool) *PauseFollow { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ccr/pausefollow/response.go b/typedapi/ccr/pausefollow/response.go index 4fc43161ba..126deafba2 100644 --- a/typedapi/ccr/pausefollow/response.go +++ b/typedapi/ccr/pausefollow/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package pausefollow // Response holds the response body struct for the package pausefollow // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/pause_follow/PauseFollowIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/pause_follow/PauseFollowIndexResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/putautofollowpattern/put_auto_follow_pattern.go b/typedapi/ccr/putautofollowpattern/put_auto_follow_pattern.go index 0e8ef7d55c..591a83105a 100644 --- a/typedapi/ccr/putautofollowpattern/put_auto_follow_pattern.go +++ b/typedapi/ccr/putautofollowpattern/put_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates a new named collection of auto-follow patterns against a specified // remote cluster. Newly created indices on the remote cluster matching any of @@ -32,6 +32,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -318,6 +319,50 @@ func (r *PutAutoFollowPattern) _name(name string) *PutAutoFollowPattern { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutAutoFollowPattern) ErrorTrace(errortrace bool) *PutAutoFollowPattern { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutAutoFollowPattern) FilterPath(filterpaths ...string) *PutAutoFollowPattern { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. 
This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutAutoFollowPattern) Human(human bool) *PutAutoFollowPattern { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutAutoFollowPattern) Pretty(pretty bool) *PutAutoFollowPattern { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // FollowIndexPattern The name of follower index. The template {{leader_index}} can be used to // derive the name of the follower index from the name of the leader index. When // following a data stream, use {{leader_index}}; CCR does not support changes diff --git a/typedapi/ccr/putautofollowpattern/request.go b/typedapi/ccr/putautofollowpattern/request.go index 92c93a4f8b..bd0218b11a 100644 --- a/typedapi/ccr/putautofollowpattern/request.go +++ b/typedapi/ccr/putautofollowpattern/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putautofollowpattern @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/put_auto_follow_pattern/PutAutoFollowPatternRequest.ts#L27-L112 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/put_auto_follow_pattern/PutAutoFollowPatternRequest.ts#L27-L112 type Request struct { // FollowIndexPattern The name of follower index. The template {{leader_index}} can be used to @@ -93,6 +93,7 @@ func NewRequest() *Request { r := &Request{ Settings: make(map[string]json.RawMessage, 0), } + return r } @@ -139,7 +140,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_outstanding_read_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -155,7 +156,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_outstanding_write_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -171,7 +172,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_read_request_operation_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -197,7 +198,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_write_buffer_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -218,7 +219,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_write_request_operation_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ccr/putautofollowpattern/response.go b/typedapi/ccr/putautofollowpattern/response.go index a44f565a02..e44755ceb1 100644 --- a/typedapi/ccr/putautofollowpattern/response.go +++ b/typedapi/ccr/putautofollowpattern/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putautofollowpattern // Response holds the response body struct for the package putautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/put_auto_follow_pattern/PutAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/put_auto_follow_pattern/PutAutoFollowPatternResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/resumeautofollowpattern/response.go b/typedapi/ccr/resumeautofollowpattern/response.go index 0fb498f798..e622bcae9e 100644 --- a/typedapi/ccr/resumeautofollowpattern/response.go +++ b/typedapi/ccr/resumeautofollowpattern/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package resumeautofollowpattern // Response holds the response body struct for the package resumeautofollowpattern // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/resume_auto_follow_pattern/ResumeAutoFollowPatternResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/resume_auto_follow_pattern/ResumeAutoFollowPatternResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/resumeautofollowpattern/resume_auto_follow_pattern.go b/typedapi/ccr/resumeautofollowpattern/resume_auto_follow_pattern.go index c2e254017e..3d55ee510b 100644 --- a/typedapi/ccr/resumeautofollowpattern/resume_auto_follow_pattern.go +++ b/typedapi/ccr/resumeautofollowpattern/resume_auto_follow_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Resumes an auto-follow pattern that has been paused package resumeautofollowpattern @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r ResumeAutoFollowPattern) IsSuccess(providedCtx context.Context) (bool, e if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -301,3 +301,47 @@ func (r *ResumeAutoFollowPattern) _name(name string) *ResumeAutoFollowPattern { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *ResumeAutoFollowPattern) ErrorTrace(errortrace bool) *ResumeAutoFollowPattern { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ResumeAutoFollowPattern) FilterPath(filterpaths ...string) *ResumeAutoFollowPattern { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ResumeAutoFollowPattern) Human(human bool) *ResumeAutoFollowPattern { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ResumeAutoFollowPattern) Pretty(pretty bool) *ResumeAutoFollowPattern { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ccr/resumefollow/request.go b/typedapi/ccr/resumefollow/request.go index bffa39130c..dab0dde46b 100644 --- a/typedapi/ccr/resumefollow/request.go +++ b/typedapi/ccr/resumefollow/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package resumefollow @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package resumefollow // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/resume_follow/ResumeFollowIndexRequest.ts#L25-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/resume_follow/ResumeFollowIndexRequest.ts#L25-L46 type Request struct { MaxOutstandingReadRequests *int64 `json:"max_outstanding_read_requests,omitempty"` MaxOutstandingWriteRequests *int64 `json:"max_outstanding_write_requests,omitempty"` @@ -50,6 +50,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -80,7 +81,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "max_outstanding_read_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +96,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "max_outstanding_write_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +111,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "max_read_request_operation_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -142,7 +143,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "max_write_buffer_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -169,7 +170,7 @@ 
func (s *Request) UnmarshalJSON(data []byte) error { s.MaxWriteBufferSize = &o case "max_write_request_operation_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ccr/resumefollow/response.go b/typedapi/ccr/resumefollow/response.go index 31364f77dc..4d18070ae7 100644 --- a/typedapi/ccr/resumefollow/response.go +++ b/typedapi/ccr/resumefollow/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package resumefollow // Response holds the response body struct for the package resumefollow // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/resume_follow/ResumeFollowIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/resume_follow/ResumeFollowIndexResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/resumefollow/resume_follow.go b/typedapi/ccr/resumefollow/resume_follow.go index f40d3434d9..8e8afba804 100644 --- a/typedapi/ccr/resumefollow/resume_follow.go +++ b/typedapi/ccr/resumefollow/resume_follow.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Resumes a follower index that has been paused package resumefollow @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -314,6 +315,50 @@ func (r *ResumeFollow) _index(index string) *ResumeFollow { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ResumeFollow) ErrorTrace(errortrace bool) *ResumeFollow { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ResumeFollow) FilterPath(filterpaths ...string) *ResumeFollow { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ResumeFollow) Human(human bool) *ResumeFollow { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
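// Editorial note, not part of the generated patch: the request decoders switch
// `var tmp interface{}` to `var tmp any`, the alias introduced in Go 1.18, so
// behaviour is unchanged. The decoding pattern they rely on, reduced to a
// stand-alone helper — fields such as max_outstanding_read_requests may arrive
// either as a JSON number or as a quoted string, so the value is decoded into
// `any` first and then normalised (the helper name is illustrative):
package examples

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// flexibleInt64 accepts both 42 and "42", mirroring the string-or-number
// switches in the generated UnmarshalJSON methods in this diff.
func flexibleInt64(raw json.RawMessage) (int64, error) {
	var tmp any
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return 0, err
	}
	switch v := tmp.(type) {
	case string:
		return strconv.ParseInt(v, 10, 64)
	case float64:
		// encoding/json decodes untyped JSON numbers into float64.
		return int64(v), nil
	default:
		return 0, fmt.Errorf("expected a number or a string, got %T", tmp)
	}
}

// For example, flexibleInt64(json.RawMessage(`"128"`)) and
// flexibleInt64(json.RawMessage(`128`)) both yield 128.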
+// API name: pretty +func (r *ResumeFollow) Pretty(pretty bool) *ResumeFollow { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: max_outstanding_read_requests func (r *ResumeFollow) MaxOutstandingReadRequests(maxoutstandingreadrequests int64) *ResumeFollow { diff --git a/typedapi/ccr/stats/response.go b/typedapi/ccr/stats/response.go index feb7e31aef..f97949edb5 100644 --- a/typedapi/ccr/stats/response.go +++ b/typedapi/ccr/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/stats/CcrStatsResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/stats/CcrStatsResponse.ts#L22-L27 type Response struct { AutoFollowStats types.AutoFollowStats `json:"auto_follow_stats"` FollowStats types.FollowStats `json:"follow_stats"` diff --git a/typedapi/ccr/stats/stats.go b/typedapi/ccr/stats/stats.go index 215d901ff1..322a499955 100644 --- a/typedapi/ccr/stats/stats.go +++ b/typedapi/ccr/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Gets all stats related to cross-cluster replication. package stats @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r Stats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *Stats) Header(key, value string) *Stats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Stats) ErrorTrace(errortrace bool) *Stats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Stats) FilterPath(filterpaths ...string) *Stats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *Stats) Human(human bool) *Stats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Stats) Pretty(pretty bool) *Stats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ccr/unfollow/response.go b/typedapi/ccr/unfollow/response.go index a838ae172a..c188572228 100644 --- a/typedapi/ccr/unfollow/response.go +++ b/typedapi/ccr/unfollow/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package unfollow // Response holds the response body struct for the package unfollow // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/unfollow/UnfollowIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/unfollow/UnfollowIndexResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ccr/unfollow/unfollow.go b/typedapi/ccr/unfollow/unfollow.go index 9080f04c29..314ed522f3 100644 --- a/typedapi/ccr/unfollow/unfollow.go +++ b/typedapi/ccr/unfollow/unfollow.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Stops the following task associated with a follower index and removes index // metadata and settings associated with cross-cluster replication. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r Unfollow) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +300,47 @@ func (r *Unfollow) _index(index string) *Unfollow { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Unfollow) ErrorTrace(errortrace bool) *Unfollow { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Unfollow) FilterPath(filterpaths ...string) *Unfollow { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. 
This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Unfollow) Human(human bool) *Unfollow { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Unfollow) Pretty(pretty bool) *Unfollow { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cluster/allocationexplain/allocation_explain.go b/typedapi/cluster/allocationexplain/allocation_explain.go index 301e369eb0..88d77457b5 100644 --- a/typedapi/cluster/allocationexplain/allocation_explain.go +++ b/typedapi/cluster/allocationexplain/allocation_explain.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Provides explanations for shard allocations in the cluster. package allocationexplain @@ -310,6 +310,50 @@ func (r *AllocationExplain) IncludeYesDecisions(includeyesdecisions bool) *Alloc return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *AllocationExplain) ErrorTrace(errortrace bool) *AllocationExplain { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *AllocationExplain) FilterPath(filterpaths ...string) *AllocationExplain { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *AllocationExplain) Human(human bool) *AllocationExplain { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *AllocationExplain) Pretty(pretty bool) *AllocationExplain { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // CurrentNode Specifies the node ID or the name of the node to only explain a shard that is // currently located on the specified node. // API name: current_node diff --git a/typedapi/cluster/allocationexplain/request.go b/typedapi/cluster/allocationexplain/request.go index 5f37e75c78..db291dfba2 100644 --- a/typedapi/cluster/allocationexplain/request.go +++ b/typedapi/cluster/allocationexplain/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package allocationexplain @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package allocationexplain // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/ClusterAllocationExplainRequest.ts#L24-L61 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/ClusterAllocationExplainRequest.ts#L24-L61 type Request struct { // CurrentNode Specifies the node ID or the name of the node to only explain a shard that is @@ -48,6 +48,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -95,7 +96,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "primary": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +111,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "shard": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/cluster/allocationexplain/response.go b/typedapi/cluster/allocationexplain/response.go index 14d4c0f125..37077864fa 100644 --- a/typedapi/cluster/allocationexplain/response.go +++ b/typedapi/cluster/allocationexplain/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package allocationexplain @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package allocationexplain // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/ClusterAllocationExplainResponse.ts#L32-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/ClusterAllocationExplainResponse.ts#L32-L64 type Response struct { AllocateExplanation *string `json:"allocate_explanation,omitempty"` AllocationDelay types.Duration `json:"allocation_delay,omitempty"` diff --git a/typedapi/cluster/deletecomponenttemplate/delete_component_template.go b/typedapi/cluster/deletecomponenttemplate/delete_component_template.go index 43bc2517c5..4bd43d5565 100644 --- a/typedapi/cluster/deletecomponenttemplate/delete_component_template.go +++ b/typedapi/cluster/deletecomponenttemplate/delete_component_template.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes a component template +// Deletes component templates. +// Component templates are building blocks for constructing index templates that +// specify index mappings, settings, and aliases. 
package deletecomponenttemplate import ( @@ -27,9 +29,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +78,9 @@ func NewDeleteComponentTemplateFunc(tp elastictransport.Interface) NewDeleteComp } } -// Deletes a component template +// Deletes component templates. +// Component templates are building blocks for constructing index templates that +// specify index mappings, settings, and aliases. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html func New(tp elastictransport.Interface) *DeleteComponentTemplate { @@ -260,7 +264,7 @@ func (r DeleteComponentTemplate) IsSuccess(providedCtx context.Context) (bool, e if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -317,3 +321,47 @@ func (r *DeleteComponentTemplate) Timeout(duration string) *DeleteComponentTempl return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteComponentTemplate) ErrorTrace(errortrace bool) *DeleteComponentTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteComponentTemplate) FilterPath(filterpaths ...string) *DeleteComponentTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteComponentTemplate) Human(human bool) *DeleteComponentTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteComponentTemplate) Pretty(pretty bool) *DeleteComponentTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cluster/deletecomponenttemplate/response.go b/typedapi/cluster/deletecomponenttemplate/response.go index cb7a3b688b..674bbb89ff 100644 --- a/typedapi/cluster/deletecomponenttemplate/response.go +++ b/typedapi/cluster/deletecomponenttemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
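// Editorial note, not part of the generated patch: delete_component_template's
// summary is expanded above, and its builder picks up the same helpers as the
// rest of the API. A hedged sketch of driving it end to end — the template
// name is illustrative, and the terminal Do call is assumed to follow the
// usual typed-builder shape (*Response, error), which this excerpt does not
// show:
package examples

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/cluster/deletecomponenttemplate"
)

// deleteTemplate removes a component template and reports whether the cluster
// acknowledged the deletion.
func deleteTemplate(tp elastictransport.Interface, name string) (bool, error) {
	res, err := deletecomponenttemplate.NewDeleteComponentTemplateFunc(tp)(name).
		Timeout("30s"). // existing timeout option on this builder
		Pretty(true).   // one of the query helpers added in this change
		Do(context.Background())
	if err != nil {
		return false, err
	}
	return res.Acknowledged, nil
}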
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletecomponenttemplate // Response holds the response body struct for the package deletecomponenttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/delete_component_template/ClusterDeleteComponentTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/delete_component_template/ClusterDeleteComponentTemplateResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/cluster/deletevotingconfigexclusions/delete_voting_config_exclusions.go b/typedapi/cluster/deletevotingconfigexclusions/delete_voting_config_exclusions.go index 3814ac1c94..14405d4a38 100644 --- a/typedapi/cluster/deletevotingconfigexclusions/delete_voting_config_exclusions.go +++ b/typedapi/cluster/deletevotingconfigexclusions/delete_voting_config_exclusions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Clears cluster voting config exclusions. package deletevotingconfigexclusions @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -198,7 +197,7 @@ func (r DeleteVotingConfigExclusions) IsSuccess(providedCtx context.Context) (bo if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -238,3 +237,47 @@ func (r *DeleteVotingConfigExclusions) WaitForRemoval(waitforremoval bool) *Dele return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteVotingConfigExclusions) ErrorTrace(errortrace bool) *DeleteVotingConfigExclusions { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteVotingConfigExclusions) FilterPath(filterpaths ...string) *DeleteVotingConfigExclusions { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteVotingConfigExclusions) Human(human bool) *DeleteVotingConfigExclusions { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *DeleteVotingConfigExclusions) Pretty(pretty bool) *DeleteVotingConfigExclusions { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cluster/existscomponenttemplate/exists_component_template.go b/typedapi/cluster/existscomponenttemplate/exists_component_template.go index fb80db16f2..7156b42557 100644 --- a/typedapi/cluster/existscomponenttemplate/exists_component_template.go +++ b/typedapi/cluster/existscomponenttemplate/exists_component_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns information about whether a particular component template exist package existscomponenttemplate @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -210,7 +209,7 @@ func (r ExistsComponentTemplate) IsSuccess(providedCtx context.Context) (bool, e if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -266,3 +265,47 @@ func (r *ExistsComponentTemplate) Local(local bool) *ExistsComponentTemplate { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ExistsComponentTemplate) ErrorTrace(errortrace bool) *ExistsComponentTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ExistsComponentTemplate) FilterPath(filterpaths ...string) *ExistsComponentTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ExistsComponentTemplate) Human(human bool) *ExistsComponentTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ExistsComponentTemplate) Pretty(pretty bool) *ExistsComponentTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cluster/getcomponenttemplate/get_component_template.go b/typedapi/cluster/getcomponenttemplate/get_component_template.go index b7d8c62e00..6ccb44ac78 100644 --- a/typedapi/cluster/getcomponenttemplate/get_component_template.go +++ b/typedapi/cluster/getcomponenttemplate/get_component_template.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
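Editor's note: the ErrorTrace, FilterPath, Human, and Pretty setters added throughout this diff surface the standard Elasticsearch query parameters on every typed request builder. A hedged sketch of how they chain, using GetComponentTemplate as an example; the filter path value and the ComponentTemplates field access are assumptions based on the generated types, not part of this diff.

package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// listComponentTemplateNames shows the shared query-parameter helpers chaining
// on a typed request. The filter_path value below is an illustrative assumption.
func listComponentTemplateNames(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Cluster.GetComponentTemplate().
		FilterPath("component_templates.name"). // reduce the response to template names
		Human(true).                            // human-readable values where applicable
		Pretty(true).                           // pretty-printed JSON (debugging only)
		Do(ctx)
	if err != nil {
		return err
	}
	for _, ct := range res.ComponentTemplates {
		fmt.Println(ct.Name)
	}
	return nil
}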
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns one or more component templates +// Retrieves information about component templates. package getcomponenttemplate import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -75,7 +74,7 @@ func NewGetComponentTemplateFunc(tp elastictransport.Interface) NewGetComponentT } } -// Returns one or more component templates +// Retrieves information about component templates. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html func New(tp elastictransport.Interface) *GetComponentTemplate { @@ -264,7 +263,7 @@ func (r GetComponentTemplate) IsSuccess(providedCtx context.Context) (bool, erro if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -336,3 +335,47 @@ func (r *GetComponentTemplate) MasterTimeout(duration string) *GetComponentTempl return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetComponentTemplate) ErrorTrace(errortrace bool) *GetComponentTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetComponentTemplate) FilterPath(filterpaths ...string) *GetComponentTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetComponentTemplate) Human(human bool) *GetComponentTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetComponentTemplate) Pretty(pretty bool) *GetComponentTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cluster/getcomponenttemplate/response.go b/typedapi/cluster/getcomponenttemplate/response.go index 654af317ee..749671bbfe 100644 --- a/typedapi/cluster/getcomponenttemplate/response.go +++ b/typedapi/cluster/getcomponenttemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getcomponenttemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getcomponenttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/get_component_template/ClusterGetComponentTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/get_component_template/ClusterGetComponentTemplateResponse.ts#L22-L24 type Response struct { ComponentTemplates []types.ClusterComponentTemplate `json:"component_templates"` } diff --git a/typedapi/cluster/getsettings/get_settings.go b/typedapi/cluster/getsettings/get_settings.go index 46f44db7d0..a06c3039cc 100644 --- a/typedapi/cluster/getsettings/get_settings.go +++ b/typedapi/cluster/getsettings/get_settings.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns cluster settings. +// Returns cluster-wide settings. +// By default, it returns only settings that have been explicitly defined. package getsettings import ( @@ -27,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -69,7 +69,8 @@ func NewGetSettingsFunc(tp elastictransport.Interface) NewGetSettings { } } -// Returns cluster settings. +// Returns cluster-wide settings. +// By default, it returns only settings that have been explicitly defined. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-get-settings.html func New(tp elastictransport.Interface) *GetSettings { @@ -249,7 +250,7 @@ func (r GetSettings) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -312,3 +313,47 @@ func (r *GetSettings) Timeout(duration string) *GetSettings { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetSettings) ErrorTrace(errortrace bool) *GetSettings { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetSettings) FilterPath(filterpaths ...string) *GetSettings { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *GetSettings) Human(human bool) *GetSettings { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetSettings) Pretty(pretty bool) *GetSettings { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cluster/getsettings/response.go b/typedapi/cluster/getsettings/response.go index 823f79115d..83534b4712 100644 --- a/typedapi/cluster/getsettings/response.go +++ b/typedapi/cluster/getsettings/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getsettings @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getsettings // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/get_settings/ClusterGetSettingsResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/get_settings/ClusterGetSettingsResponse.ts#L23-L29 type Response struct { Defaults map[string]json.RawMessage `json:"defaults,omitempty"` Persistent map[string]json.RawMessage `json:"persistent"` diff --git a/typedapi/cluster/health/health.go b/typedapi/cluster/health/health.go index 8114ad3e9a..371f9633c0 100644 --- a/typedapi/cluster/health/health.go +++ b/typedapi/cluster/health/health.go @@ -16,9 +16,18 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Returns basic information about the health of the cluster. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// The cluster health API returns a simple status on the health of the cluster. +// You can also use the API to get the health status of only specified data +// streams and indices. For data streams, the API retrieves the health status of +// the stream’s backing indices. +// The cluster health status is: green, yellow or red. On the shard level, a red +// status indicates that the specific shard is not allocated in the cluster, +// yellow means that the primary shard is allocated but replicas are not, and +// green means that all shards are allocated. The index level status is +// controlled by the worst shard status. The cluster status is controlled by the +// worst index status. package health import ( @@ -28,7 +37,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -80,7 +88,16 @@ func NewHealthFunc(tp elastictransport.Interface) NewHealth { } } -// Returns basic information about the health of the cluster. +// The cluster health API returns a simple status on the health of the cluster. +// You can also use the API to get the health status of only specified data +// streams and indices. For data streams, the API retrieves the health status of +// the stream’s backing indices. +// The cluster health status is: green, yellow or red. 
On the shard level, a red +// status indicates that the specific shard is not allocated in the cluster, +// yellow means that the primary shard is allocated but replicas are not, and +// green means that all shards are allocated. The index level status is +// controlled by the worst shard status. The cluster status is controlled by the +// worst index status. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-health.html func New(tp elastictransport.Interface) *Health { @@ -237,7 +254,7 @@ func (r Health) Do(providedCtx context.Context) (*Response, error) { } if res.StatusCode == 408 { - data, err := ioutil.ReadAll(res.Body) + data, err := io.ReadAll(res.Body) if err != nil { if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, err) @@ -309,7 +326,7 @@ func (r Health) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -454,3 +471,47 @@ func (r *Health) WaitForStatus(waitforstatus healthstatus.HealthStatus) *Health return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Health) ErrorTrace(errortrace bool) *Health { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Health) FilterPath(filterpaths ...string) *Health { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Health) Human(human bool) *Health { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Health) Pretty(pretty bool) *Health { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cluster/health/response.go b/typedapi/cluster/health/response.go index f755104e2a..77737d89a6 100644 --- a/typedapi/cluster/health/response.go +++ b/typedapi/cluster/health/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
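Editor's note: to illustrate the expanded cluster health description, a short sketch of waiting for a given status. The WaitForStatus setter appears in this diff; the healthstatus enum import path and the Status field on the response are assumptions based on the generated typedapi packages.

package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/healthstatus"
)

// waitForYellow blocks until the cluster reaches at least yellow health,
// then reports the overall status.
func waitForYellow(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Cluster.Health().
		WaitForStatus(healthstatus.Yellow).
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("cluster status:", res.Status)
	return nil
}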
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package health @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package health // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/health/ClusterHealthResponse.ts#L26-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/health/ClusterHealthResponse.ts#L26-L37 type Response struct { // ActivePrimaryShards The number of active primary shards. diff --git a/typedapi/cluster/info/info.go b/typedapi/cluster/info/info.go index 01be79eb1b..674754d5eb 100644 --- a/typedapi/cluster/info/info.go +++ b/typedapi/cluster/info/info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns different information about the cluster. package info @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -260,7 +260,7 @@ func (r Info) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -297,3 +297,47 @@ func (r *Info) _target(target string) *Info { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Info) ErrorTrace(errortrace bool) *Info { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Info) FilterPath(filterpaths ...string) *Info { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Info) Human(human bool) *Info { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Info) Pretty(pretty bool) *Info { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cluster/info/response.go b/typedapi/cluster/info/response.go index c7b43f05e2..7d3dcf2d38 100644 --- a/typedapi/cluster/info/response.go +++ b/typedapi/cluster/info/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/info/ClusterInfoResponse.ts#L26-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/info/ClusterInfoResponse.ts#L26-L34 type Response struct { ClusterName string `json:"cluster_name"` Http *types.Http `json:"http,omitempty"` diff --git a/typedapi/cluster/pendingtasks/pending_tasks.go b/typedapi/cluster/pendingtasks/pending_tasks.go index 44de46ee3d..4bbc06475e 100644 --- a/typedapi/cluster/pendingtasks/pending_tasks.go +++ b/typedapi/cluster/pendingtasks/pending_tasks.go @@ -16,11 +16,17 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Returns a list of any cluster-level changes (e.g. create index, update -// mapping, -// allocate or fail shard) which have not yet been executed. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Returns cluster-level changes (such as create index, update mapping, allocate +// or fail shard) that have not yet been executed. +// NOTE: This API returns a list of any pending updates to the cluster state. +// These are distinct from the tasks reported by the Task Management API which +// include periodic tasks and tasks initiated by the user, such as node stats, +// search queries, or create index requests. +// However, if a user-initiated task such as a create index command causes a +// cluster state update, the activity of this task might be reported by both +// task api and pending cluster tasks API. package pendingtasks import ( @@ -29,7 +35,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -71,9 +76,15 @@ func NewPendingTasksFunc(tp elastictransport.Interface) NewPendingTasks { } } -// Returns a list of any cluster-level changes (e.g. create index, update -// mapping, -// allocate or fail shard) which have not yet been executed. +// Returns cluster-level changes (such as create index, update mapping, allocate +// or fail shard) that have not yet been executed. +// NOTE: This API returns a list of any pending updates to the cluster state. +// These are distinct from the tasks reported by the Task Management API which +// include periodic tasks and tasks initiated by the user, such as node stats, +// search queries, or create index requests. +// However, if a user-initiated task such as a create index command causes a +// cluster state update, the activity of this task might be reported by both +// task api and pending cluster tasks API. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-pending.html func New(tp elastictransport.Interface) *PendingTasks { @@ -253,7 +264,7 @@ func (r PendingTasks) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -299,3 +310,47 @@ func (r *PendingTasks) MasterTimeout(duration string) *PendingTasks { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PendingTasks) ErrorTrace(errortrace bool) *PendingTasks { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PendingTasks) FilterPath(filterpaths ...string) *PendingTasks { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PendingTasks) Human(human bool) *PendingTasks { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PendingTasks) Pretty(pretty bool) *PendingTasks { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cluster/pendingtasks/response.go b/typedapi/cluster/pendingtasks/response.go index e900e3d20f..b974092ce7 100644 --- a/typedapi/cluster/pendingtasks/response.go +++ b/typedapi/cluster/pendingtasks/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package pendingtasks @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package pendingtasks // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/pending_tasks/ClusterPendingTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/pending_tasks/ClusterPendingTasksResponse.ts#L22-L24 type Response struct { Tasks []types.PendingTask `json:"tasks"` } diff --git a/typedapi/cluster/postvotingconfigexclusions/post_voting_config_exclusions.go b/typedapi/cluster/postvotingconfigexclusions/post_voting_config_exclusions.go index de6a64a74b..9bc6fcb7c9 100644 --- a/typedapi/cluster/postvotingconfigexclusions/post_voting_config_exclusions.go +++ b/typedapi/cluster/postvotingconfigexclusions/post_voting_config_exclusions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
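Editor's note: the pending cluster tasks description above distinguishes pending cluster-state updates from tasks reported by the task management API. A minimal sketch of listing them with the typed client; the PendingTask field names are assumptions based on the generated types package.

package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// printPendingClusterTasks lists cluster-state updates that have not yet
// been executed, as described in the regenerated doc comment above.
func printPendingClusterTasks(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Cluster.PendingTasks().Do(ctx)
	if err != nil {
		return err
	}
	for _, task := range res.Tasks {
		fmt.Printf("priority=%v source=%v\n", task.Priority, task.Source)
	}
	return nil
}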
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Updates the cluster voting config exclusions by node ids or node names. package postvotingconfigexclusions @@ -26,9 +26,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -197,7 +197,7 @@ func (r PostVotingConfigExclusions) IsSuccess(providedCtx context.Context) (bool if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -254,3 +254,47 @@ func (r *PostVotingConfigExclusions) Timeout(duration string) *PostVotingConfigE return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PostVotingConfigExclusions) ErrorTrace(errortrace bool) *PostVotingConfigExclusions { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PostVotingConfigExclusions) FilterPath(filterpaths ...string) *PostVotingConfigExclusions { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PostVotingConfigExclusions) Human(human bool) *PostVotingConfigExclusions { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PostVotingConfigExclusions) Pretty(pretty bool) *PostVotingConfigExclusions { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cluster/putcomponenttemplate/put_component_template.go b/typedapi/cluster/putcomponenttemplate/put_component_template.go index db4d168ba8..b989eedae5 100644 --- a/typedapi/cluster/putcomponenttemplate/put_component_template.go +++ b/typedapi/cluster/putcomponenttemplate/put_component_template.go @@ -16,9 +16,31 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates or updates a component template +// Creates or updates a component template. +// Component templates are building blocks for constructing index templates that +// specify index mappings, settings, and aliases. +// +// An index template can be composed of multiple component templates. +// To use a component template, specify it in an index template’s `composed_of` +// list. +// Component templates are only applied to new data streams and indices as part +// of a matching index template. 
+// +// Settings and mappings specified directly in the index template or the create +// index request override any settings or mappings specified in a component +// template. +// +// Component templates are only used during index creation. +// For data streams, this includes data stream creation and the creation of a +// stream’s backing indices. +// Changes to component templates do not affect existing indices, including a +// stream’s backing indices. +// +// You can use C-style `/* *\/` block comments in component templates. +// You can include comments anywhere in the request body except before the +// opening curly bracket. package putcomponenttemplate import ( @@ -81,7 +103,29 @@ func NewPutComponentTemplateFunc(tp elastictransport.Interface) NewPutComponentT } } -// Creates or updates a component template +// Creates or updates a component template. +// Component templates are building blocks for constructing index templates that +// specify index mappings, settings, and aliases. +// +// An index template can be composed of multiple component templates. +// To use a component template, specify it in an index template’s `composed_of` +// list. +// Component templates are only applied to new data streams and indices as part +// of a matching index template. +// +// Settings and mappings specified directly in the index template or the create +// index request override any settings or mappings specified in a component +// template. +// +// Component templates are only used during index creation. +// For data streams, this includes data stream creation and the creation of a +// stream’s backing indices. +// Changes to component templates do not affect existing indices, including a +// stream’s backing indices. +// +// You can use C-style `/* *\/` block comments in component templates. +// You can include comments anywhere in the request body except before the +// opening curly bracket. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-component-template.html func New(tp elastictransport.Interface) *PutComponentTemplate { @@ -342,17 +386,57 @@ func (r *PutComponentTemplate) MasterTimeout(duration string) *PutComponentTempl return r } -// AllowAutoCreate This setting overrides the value of the `action.auto_create_index` cluster -// setting. -// If set to `true` in a template, then indices can be automatically created -// using that -// template even if auto-creation of indices is disabled via -// `actions.auto_create_index`. -// If set to `false` then data streams matching the template must always be -// explicitly created. -// API name: allow_auto_create -func (r *PutComponentTemplate) AllowAutoCreate(allowautocreate bool) *PutComponentTemplate { - r.req.AllowAutoCreate = &allowautocreate +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutComponentTemplate) ErrorTrace(errortrace bool) *PutComponentTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *PutComponentTemplate) FilterPath(filterpaths ...string) *PutComponentTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutComponentTemplate) Human(human bool) *PutComponentTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutComponentTemplate) Pretty(pretty bool) *PutComponentTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + +// Deprecated Marks this index template as deprecated. When creating or updating a +// non-deprecated index template +// that uses deprecated components, Elasticsearch will emit a deprecation +// warning. +// API name: deprecated +func (r *PutComponentTemplate) Deprecated(deprecated bool) *PutComponentTemplate { + r.req.Deprecated = &deprecated return r } diff --git a/typedapi/cluster/putcomponenttemplate/request.go b/typedapi/cluster/putcomponenttemplate/request.go index 968756708d..ad3205175e 100644 --- a/typedapi/cluster/putcomponenttemplate/request.go +++ b/typedapi/cluster/putcomponenttemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putcomponenttemplate @@ -33,18 +33,14 @@ import ( // Request holds the request body struct for the package putcomponenttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/put_component_template/ClusterPutComponentTemplateRequest.ts#L29-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/put_component_template/ClusterPutComponentTemplateRequest.ts#L25-L93 type Request struct { - // AllowAutoCreate This setting overrides the value of the `action.auto_create_index` cluster - // setting. - // If set to `true` in a template, then indices can be automatically created - // using that - // template even if auto-creation of indices is disabled via - // `actions.auto_create_index`. - // If set to `false` then data streams matching the template must always be - // explicitly created. - AllowAutoCreate *bool `json:"allow_auto_create,omitempty"` + // Deprecated Marks this index template as deprecated. When creating or updating a + // non-deprecated index template + // that uses deprecated components, Elasticsearch will emit a deprecation + // warning. + Deprecated *bool `json:"deprecated,omitempty"` // Meta_ Optional user metadata about the component template. // May have any contents. This map is not automatically generated by // Elasticsearch. 
@@ -64,6 +60,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -93,18 +90,18 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { - case "allow_auto_create": - var tmp interface{} + case "deprecated": + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: value, err := strconv.ParseBool(v) if err != nil { - return fmt.Errorf("%s | %w", "AllowAutoCreate", err) + return fmt.Errorf("%s | %w", "Deprecated", err) } - s.AllowAutoCreate = &value + s.Deprecated = &value case bool: - s.AllowAutoCreate = &v + s.Deprecated = &v } case "_meta": diff --git a/typedapi/cluster/putcomponenttemplate/response.go b/typedapi/cluster/putcomponenttemplate/response.go index 8a9e55d2aa..12a8b971cd 100644 --- a/typedapi/cluster/putcomponenttemplate/response.go +++ b/typedapi/cluster/putcomponenttemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putcomponenttemplate // Response holds the response body struct for the package putcomponenttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/put_component_template/ClusterPutComponentTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/put_component_template/ClusterPutComponentTemplateResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/cluster/putsettings/put_settings.go b/typedapi/cluster/putsettings/put_settings.go index f6803c0dfe..f5362ea098 100644 --- a/typedapi/cluster/putsettings/put_settings.go +++ b/typedapi/cluster/putsettings/put_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Updates the cluster settings. package putsettings @@ -316,6 +316,50 @@ func (r *PutSettings) Timeout(duration string) *PutSettings { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutSettings) ErrorTrace(errortrace bool) *PutSettings { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutSettings) FilterPath(filterpaths ...string) *PutSettings { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *PutSettings) Human(human bool) *PutSettings { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutSettings) Pretty(pretty bool) *PutSettings { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: persistent func (r *PutSettings) Persistent(persistent map[string]json.RawMessage) *PutSettings { diff --git a/typedapi/cluster/putsettings/request.go b/typedapi/cluster/putsettings/request.go index 79886d9bef..60cd2e5086 100644 --- a/typedapi/cluster/putsettings/request.go +++ b/typedapi/cluster/putsettings/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putsettings @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package putsettings // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/put_settings/ClusterPutSettingsRequest.ts#L25-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/put_settings/ClusterPutSettingsRequest.ts#L25-L43 type Request struct { Persistent map[string]json.RawMessage `json:"persistent,omitempty"` Transient map[string]json.RawMessage `json:"transient,omitempty"` @@ -39,6 +39,7 @@ func NewRequest() *Request { Persistent: make(map[string]json.RawMessage, 0), Transient: make(map[string]json.RawMessage, 0), } + return r } diff --git a/typedapi/cluster/putsettings/response.go b/typedapi/cluster/putsettings/response.go index 679e9eecd0..61fdcf5acc 100644 --- a/typedapi/cluster/putsettings/response.go +++ b/typedapi/cluster/putsettings/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putsettings @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putsettings // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/put_settings/ClusterPutSettingsResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/put_settings/ClusterPutSettingsResponse.ts#L23-L29 type Response struct { Acknowledged bool `json:"acknowledged"` Persistent map[string]json.RawMessage `json:"persistent"` diff --git a/typedapi/cluster/remoteinfo/remote_info.go b/typedapi/cluster/remoteinfo/remote_info.go index 54c429acf0..cb7795171d 100644 --- a/typedapi/cluster/remoteinfo/remote_info.go +++ b/typedapi/cluster/remoteinfo/remote_info.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
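Editor's note: the long description above explains how component templates act as building blocks that index templates reference via `composed_of`. A hedged creation sketch follows; it passes the body as raw JSON via the generated Raw helper rather than the typed body setters, and the template name and settings are illustrative assumptions. Note it also sets the new `deprecated` flag introduced in this diff.

package main

import (
	"context"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

// putComponentTemplate creates a reusable building block that an index
// template can later reference in its `composed_of` list.
func putComponentTemplate(ctx context.Context, es *elasticsearch.TypedClient) error {
	body := `{
	  "template": {
	    "settings": { "number_of_shards": 1 },
	    "mappings": {
	      "properties": { "@timestamp": { "type": "date" } }
	    }
	  },
	  "deprecated": false
	}`
	_, err := es.Cluster.PutComponentTemplate("my-settings-and-mappings").
		Raw(strings.NewReader(body)).
		Do(ctx)
	return err
}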
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns the information about configured remote clusters. +// The cluster remote info API allows you to retrieve all of the configured +// remote cluster information. It returns connection and endpoint information +// keyed by the configured remote cluster alias. package remoteinfo import ( @@ -27,9 +29,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -68,7 +70,9 @@ func NewRemoteInfoFunc(tp elastictransport.Interface) NewRemoteInfo { } } -// Returns the information about configured remote clusters. +// The cluster remote info API allows you to retrieve all of the configured +// remote cluster information. It returns connection and endpoint information +// keyed by the configured remote cluster alias. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-remote-info.html func New(tp elastictransport.Interface) *RemoteInfo { @@ -248,7 +252,7 @@ func (r RemoteInfo) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +279,47 @@ func (r *RemoteInfo) Header(key, value string) *RemoteInfo { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *RemoteInfo) ErrorTrace(errortrace bool) *RemoteInfo { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *RemoteInfo) FilterPath(filterpaths ...string) *RemoteInfo { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *RemoteInfo) Human(human bool) *RemoteInfo { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *RemoteInfo) Pretty(pretty bool) *RemoteInfo { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cluster/remoteinfo/response.go b/typedapi/cluster/remoteinfo/response.go index 87865a5941..f60297ca9f 100644 --- a/typedapi/cluster/remoteinfo/response.go +++ b/typedapi/cluster/remoteinfo/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
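Editor's note: as the remote info description states, the response is keyed by the configured remote cluster alias; the diff below shows the map-typed Response. A minimal sketch of ranging over the aliases:

package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// listRemoteClusters prints the configured remote cluster aliases. The
// response is the map keyed by alias shown in the generated Response type.
func listRemoteClusters(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Cluster.RemoteInfo().Do(ctx)
	if err != nil {
		return err
	}
	for alias := range res {
		fmt.Println("remote cluster:", alias)
	}
	return nil
}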
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package remoteinfo @@ -32,7 +32,7 @@ import ( // Response holds the response body struct for the package remoteinfo // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L24-L27 type Response map[string]types.ClusterRemoteInfo @@ -45,7 +45,7 @@ func NewResponse() Response { func (r Response) UnmarshalJSON(data []byte) error { dec := json.NewDecoder(bytes.NewReader(data)) - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) dec.Decode(&o) dec = json.NewDecoder(bytes.NewReader(data)) for { @@ -59,7 +59,7 @@ func (r Response) UnmarshalJSON(data []byte) error { key := fmt.Sprintf("%s", t) if target, ok := o[key]; ok { - if t, ok := target.(map[string]interface{})["mode"]; ok { + if t, ok := target.(map[string]any)["mode"]; ok { switch t { diff --git a/typedapi/cluster/reroute/request.go b/typedapi/cluster/reroute/request.go index 43fd2a8922..12168e386e 100644 --- a/typedapi/cluster/reroute/request.go +++ b/typedapi/cluster/reroute/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package reroute @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package reroute // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/reroute/ClusterRerouteRequest.ts#L25-L70 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/reroute/ClusterRerouteRequest.ts#L25-L70 type Request struct { // Commands Defines the commands to perform. @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/cluster/reroute/reroute.go b/typedapi/cluster/reroute/reroute.go index 74178a6339..5a375eaf18 100644 --- a/typedapi/cluster/reroute/reroute.go +++ b/typedapi/cluster/reroute/reroute.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Allows to manually change the allocation of individual shards in the cluster. package reroute @@ -345,6 +345,50 @@ func (r *Reroute) Timeout(duration string) *Reroute { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Reroute) ErrorTrace(errortrace bool) *Reroute { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Reroute) FilterPath(filterpaths ...string) *Reroute { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Reroute) Human(human bool) *Reroute { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Reroute) Pretty(pretty bool) *Reroute { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Commands Defines the commands to perform. // API name: commands func (r *Reroute) Commands(commands ...types.Command) *Reroute { diff --git a/typedapi/cluster/reroute/response.go b/typedapi/cluster/reroute/response.go index 1a838ecc7c..56c6dbffd5 100644 --- a/typedapi/cluster/reroute/response.go +++ b/typedapi/cluster/reroute/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package reroute @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package reroute // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/reroute/ClusterRerouteResponse.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/reroute/ClusterRerouteResponse.ts#L23-L34 type Response struct { Acknowledged bool `json:"acknowledged"` Explanations []types.RerouteExplanation `json:"explanations,omitempty"` diff --git a/typedapi/cluster/state/response.go b/typedapi/cluster/state/response.go index 110deea739..9e7f2ed5e7 100644 --- a/typedapi/cluster/state/response.go +++ b/typedapi/cluster/state/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package state @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package state // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/state/ClusterStateResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/state/ClusterStateResponse.ts#L22-L29 type Response = json.RawMessage diff --git a/typedapi/cluster/state/state.go b/typedapi/cluster/state/state.go index 3dbc263518..805c2635e4 100644 --- a/typedapi/cluster/state/state.go +++ b/typedapi/cluster/state/state.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns a comprehensive information about the state of the cluster. package state @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -291,7 +290,7 @@ func (r State) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -410,3 +409,47 @@ func (r *State) WaitForTimeout(duration string) *State { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *State) ErrorTrace(errortrace bool) *State { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *State) FilterPath(filterpaths ...string) *State { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *State) Human(human bool) *State { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *State) Pretty(pretty bool) *State { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/cluster/stats/response.go b/typedapi/cluster/stats/response.go index a7ef2e74c8..c828f37a41 100644 --- a/typedapi/cluster/stats/response.go +++ b/typedapi/cluster/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stats @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/ClusterStatsResponse.ts#L53-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/ClusterStatsResponse.ts#L53-L55 type Response struct { // ClusterName Name of the cluster, based on the cluster name setting. diff --git a/typedapi/cluster/stats/stats.go b/typedapi/cluster/stats/stats.go index ca42923ce1..b14f44b9c5 100644 --- a/typedapi/cluster/stats/stats.go +++ b/typedapi/cluster/stats/stats.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
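One recurring, purely mechanical change across these files is the move from the deprecated io/ioutil helpers to their io equivalents (io.Discard, io.ReadAll). A minimal standalone sketch of the drain-and-close pattern the regenerated IsSuccess helpers now use:

import (
	"io"
	"net/http"
)

// drainAndClose mirrors what the regenerated IsSuccess helpers do after the
// ioutil -> io migration: io.Discard (Go 1.16+) replaces ioutil.Discard, and
// the body is still fully drained so the underlying connection can be reused.
func drainAndClose(res *http.Response) error {
	if _, err := io.Copy(io.Discard, res.Body); err != nil {
		res.Body.Close()
		return err
	}
	return res.Body.Close()
}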
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns high-level overview of cluster statistics. +// Returns cluster statistics. +// It returns basic index metrics (shard numbers, store size, memory usage) and +// information about the current nodes that form the cluster (number, roles, os, +// jvm versions, memory usage, cpu and installed plugins). package stats import ( @@ -27,7 +30,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -75,7 +77,10 @@ func NewStatsFunc(tp elastictransport.Interface) NewStats { } } -// Returns high-level overview of cluster statistics. +// Returns cluster statistics. +// It returns basic index metrics (shard numbers, store size, memory usage) and +// information about the current nodes that form the cluster (number, roles, os, +// jvm versions, memory usage, cpu and installed plugins). // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-stats.html func New(tp elastictransport.Interface) *Stats { @@ -270,7 +275,7 @@ func (r Stats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -327,3 +332,47 @@ func (r *Stats) Timeout(duration string) *Stats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Stats) ErrorTrace(errortrace bool) *Stats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Stats) FilterPath(filterpaths ...string) *Stats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Stats) Human(human bool) *Stats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Stats) Pretty(pretty bool) *Stats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/bulk/bulk.go b/typedapi/core/bulk/bulk.go index db5927840a..3f9e507c69 100644 --- a/typedapi/core/bulk/bulk.go +++ b/typedapi/core/bulk/bulk.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Allows to perform multiple index/update/delete operations in a single -// request. +// Performs multiple indexing or delete operations in a single API call. 
+// This reduces overhead and can greatly increase indexing speed. package bulk import ( @@ -81,8 +81,8 @@ func NewBulkFunc(tp elastictransport.Interface) NewBulk { } } -// Allows to perform multiple index/update/delete operations in a single -// request. +// Performs multiple indexing or delete operations in a single API call. +// This reduces overhead and can greatly increase indexing speed. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html func New(tp elastictransport.Interface) *Bulk { @@ -405,3 +405,47 @@ func (r *Bulk) RequireAlias(requirealias bool) *Bulk { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Bulk) ErrorTrace(errortrace bool) *Bulk { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Bulk) FilterPath(filterpaths ...string) *Bulk { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Bulk) Human(human bool) *Bulk { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Bulk) Pretty(pretty bool) *Bulk { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/bulk/request.go b/typedapi/core/bulk/request.go index 141e5fc5c6..f9e5e2bc3d 100644 --- a/typedapi/core/bulk/request.go +++ b/typedapi/core/bulk/request.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package bulk // Request holds the request body struct for the package bulk // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/bulk/BulkRequest.ts#L32-L103 -type Request = []interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/bulk/BulkRequest.ts#L32-L103 +type Request = []any diff --git a/typedapi/core/bulk/response.go b/typedapi/core/bulk/response.go index 3bbb5b7949..b799d2b36d 100644 --- a/typedapi/core/bulk/response.go +++ b/typedapi/core/bulk/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
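The ErrorTrace, FilterPath, Human, and Pretty helpers are generated for every builder touched in this diff, so they chain like any other request option. A sketch against the cluster stats builder, assuming `transport` is any elastictransport.Interface (for example the one backing an already-configured typed client) and that Do follows the usual generated (*Response, error) signature:

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/cluster/stats"
)

// clusterName chains the new request options on the cluster stats builder.
// filter_path trims the JSON returned by Elasticsearch; human=true switches
// durations and sizes to human-readable strings.
func clusterName(transport elastictransport.Interface) (string, error) {
	res, err := stats.New(transport).
		Human(true).
		FilterPath("cluster_name", "nodes.count").
		Do(context.Background())
	if err != nil {
		return "", err
	}
	return res.ClusterName, nil
}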
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package bulk @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package bulk // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/bulk/BulkResponse.ts#L24-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/bulk/BulkResponse.ts#L24-L31 type Response struct { Errors bool `json:"errors"` IngestTook *int64 `json:"ingest_took,omitempty"` diff --git a/typedapi/core/clearscroll/clear_scroll.go b/typedapi/core/clearscroll/clear_scroll.go index 2ef711ac26..0ef3e12df5 100644 --- a/typedapi/core/clearscroll/clear_scroll.go +++ b/typedapi/core/clearscroll/clear_scroll.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Explicitly clears the search context for a scroll. +// Clears the search context and results for a scrolling search. package clearscroll import ( @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -75,7 +75,7 @@ func NewClearScrollFunc(tp elastictransport.Interface) NewClearScroll { } } -// Explicitly clears the search context for a scroll. +// Clears the search context and results for a scrolling search. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/clear-scroll-api.html func New(tp elastictransport.Interface) *ClearScroll { @@ -269,7 +269,7 @@ func (r ClearScroll) Do(providedCtx context.Context) (*Response, error) { } if res.StatusCode == 404 { - data, err := ioutil.ReadAll(res.Body) + data, err := io.ReadAll(res.Body) if err != nil { if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, err) @@ -330,6 +330,50 @@ func (r *ClearScroll) Header(key, value string) *ClearScroll { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ClearScroll) ErrorTrace(errortrace bool) *ClearScroll { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ClearScroll) FilterPath(filterpaths ...string) *ClearScroll { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *ClearScroll) Human(human bool) *ClearScroll { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ClearScroll) Pretty(pretty bool) *ClearScroll { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // ScrollId Scroll IDs to clear. // To clear all scroll IDs, use `_all`. // API name: scroll_id diff --git a/typedapi/core/clearscroll/request.go b/typedapi/core/clearscroll/request.go index a603d49dce..ddf6193f00 100644 --- a/typedapi/core/clearscroll/request.go +++ b/typedapi/core/clearscroll/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clearscroll @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package clearscroll // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/clear_scroll/ClearScrollRequest.ts#L23-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/clear_scroll/ClearScrollRequest.ts#L23-L45 type Request struct { // ScrollId Scroll IDs to clear. @@ -38,6 +38,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/clearscroll/response.go b/typedapi/core/clearscroll/response.go index 4e1561f089..4f9475fa23 100644 --- a/typedapi/core/clearscroll/response.go +++ b/typedapi/core/clearscroll/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clearscroll // Response holds the response body struct for the package clearscroll // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/clear_scroll/ClearScrollResponse.ts#L22-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/clear_scroll/ClearScrollResponse.ts#L22-L36 type Response struct { NumFreed int `json:"num_freed"` Succeeded bool `json:"succeeded"` diff --git a/typedapi/core/closepointintime/close_point_in_time.go b/typedapi/core/closepointintime/close_point_in_time.go index 497ccac957..e93c0f4f28 100644 --- a/typedapi/core/closepointintime/close_point_in_time.go +++ b/typedapi/core/closepointintime/close_point_in_time.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Close a point in time +// Closes a point-in-time. 
package closepointintime import ( @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -73,7 +73,7 @@ func NewClosePointInTimeFunc(tp elastictransport.Interface) NewClosePointInTime } } -// Close a point in time +// Closes a point-in-time. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html func New(tp elastictransport.Interface) *ClosePointInTime { @@ -265,7 +265,7 @@ func (r ClosePointInTime) Do(providedCtx context.Context) (*Response, error) { } if res.StatusCode == 404 { - data, err := ioutil.ReadAll(res.Body) + data, err := io.ReadAll(res.Body) if err != nil { if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, err) @@ -326,6 +326,50 @@ func (r *ClosePointInTime) Header(key, value string) *ClosePointInTime { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ClosePointInTime) ErrorTrace(errortrace bool) *ClosePointInTime { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ClosePointInTime) FilterPath(filterpaths ...string) *ClosePointInTime { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ClosePointInTime) Human(human bool) *ClosePointInTime { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ClosePointInTime) Pretty(pretty bool) *ClosePointInTime { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Id The ID of the point-in-time. // API name: id func (r *ClosePointInTime) Id(id string) *ClosePointInTime { diff --git a/typedapi/core/closepointintime/request.go b/typedapi/core/closepointintime/request.go index 464acf9391..617e2ec16c 100644 --- a/typedapi/core/closepointintime/request.go +++ b/typedapi/core/closepointintime/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
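ClosePointInTime gains the same options and keeps its Id setter. A hedged sketch of closing a point-in-time with the generated builder, where `transport` is as above and `pitID` was previously returned by the open point-in-time API:

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/core/closepointintime"
)

// closePIT closes a previously opened point-in-time and reports whether
// Elasticsearch freed it (Succeeded and NumFreed come from this package's
// Response struct).
func closePIT(transport elastictransport.Interface, pitID string) (bool, error) {
	res, err := closepointintime.New(transport).
		Id(pitID).
		Do(context.Background())
	if err != nil {
		return false, err
	}
	return res.Succeeded, nil
}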
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package closepointintime @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package closepointintime // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/close_point_in_time/ClosePointInTimeRequest.ts#L23-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/close_point_in_time/ClosePointInTimeRequest.ts#L23-L37 type Request struct { // Id The ID of the point-in-time. @@ -37,6 +37,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/closepointintime/response.go b/typedapi/core/closepointintime/response.go index 3c04c94a11..9562ce4806 100644 --- a/typedapi/core/closepointintime/response.go +++ b/typedapi/core/closepointintime/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package closepointintime // Response holds the response body struct for the package closepointintime // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/close_point_in_time/ClosePointInTimeResponse.ts#L22-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/close_point_in_time/ClosePointInTimeResponse.ts#L22-L36 type Response struct { NumFreed int `json:"num_freed"` Succeeded bool `json:"succeeded"` diff --git a/typedapi/core/count/count.go b/typedapi/core/count/count.go index 70af3f4b77..a8418be860 100644 --- a/typedapi/core/count/count.go +++ b/typedapi/core/count/count.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns number of documents matching a query. package count @@ -456,6 +456,50 @@ func (r *Count) Q(q string) *Count { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Count) ErrorTrace(errortrace bool) *Count { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Count) FilterPath(filterpaths ...string) *Count { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. 
This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Count) Human(human bool) *Count { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Count) Pretty(pretty bool) *Count { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Query Defines the search definition using the Query DSL. // API name: query func (r *Count) Query(query *types.Query) *Count { diff --git a/typedapi/core/count/request.go b/typedapi/core/count/request.go index 136bba94d8..949efd88c1 100644 --- a/typedapi/core/count/request.go +++ b/typedapi/core/count/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package count @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package count // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/count/CountRequest.ts#L26-L120 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/count/CountRequest.ts#L26-L120 type Request struct { // Query Defines the search definition using the Query DSL. @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/count/response.go b/typedapi/core/count/response.go index 171e0678a1..9e88dd312d 100644 --- a/typedapi/core/count/response.go +++ b/typedapi/core/count/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package count @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package count // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/count/CountResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/count/CountResponse.ts#L23-L25 type Response struct { Count int64 `json:"count"` Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/core/create/create.go b/typedapi/core/create/create.go index c80276c457..659464e4bf 100644 --- a/typedapi/core/create/create.go +++ b/typedapi/core/create/create.go @@ -16,12 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates a new document in the index. -// -// Returns a 409 response when a document with a same ID already exists in the -// index. +// Adds a JSON document to the specified data stream or index and makes it +// searchable. 
+// If the target is an index and the document already exists, the request +// updates the document and increments its version. package create import ( @@ -33,6 +33,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -59,8 +60,8 @@ type Create struct { raw io.Reader - req interface{} - deferred []func(request interface{}) error + req any + deferred []func(request any) error buf *gobytes.Buffer paramSet int @@ -90,10 +91,10 @@ func NewCreateFunc(tp elastictransport.Interface) NewCreate { } } -// Creates a new document in the index. -// -// Returns a 409 response when a document with a same ID already exists in the -// index. +// Adds a JSON document to the specified data stream or index and makes it +// searchable. +// If the target is an index and the document already exists, the request +// updates the document and increments its version. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html func New(tp elastictransport.Interface) *Create { @@ -103,6 +104,8 @@ func New(tp elastictransport.Interface) *Create { headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -123,14 +126,14 @@ func (r *Create) Raw(raw io.Reader) *Create { } // Request allows to set the request property with the appropriate payload. -func (r *Create) Request(req interface{}) *Create { +func (r *Create) Request(req any) *Create { r.req = req return r } // Document allows to set the request property with the appropriate payload. -func (r *Create) Document(document interface{}) *Create { +func (r *Create) Document(document any) *Create { r.req = document return r @@ -418,3 +421,47 @@ func (r *Create) WaitForActiveShards(waitforactiveshards string) *Create { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Create) ErrorTrace(errortrace bool) *Create { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Create) FilterPath(filterpaths ...string) *Create { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Create) Human(human bool) *Create { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Create) Pretty(pretty bool) *Create { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/create/request.go b/typedapi/core/create/request.go index 9035758ac9..b37febf44b 100644 --- a/typedapi/core/create/request.go +++ b/typedapi/core/create/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
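Document, now typed as any, still accepts any JSON-serializable value. A sketch, assuming a *create.Create builder whose target index and document ID were already set through the unchanged parts of the API (not shown in this hunk); the document itself is hypothetical:

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8/typedapi/core/create"
)

// indexBook attaches a plain struct as the request body via Document(any) and
// sends it; Do is assumed to follow the usual generated (*Response, error)
// signature.
func indexBook(ctx context.Context, c *create.Create) error {
	doc := struct {
		Title  string `json:"title"`
		Author string `json:"author"`
	}{Title: "Example Title", Author: "Example Author"} // hypothetical document
	_, err := c.Document(doc).Do(ctx)
	return err
}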
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package create @@ -26,5 +26,12 @@ import ( // Request holds the request body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/create/CreateRequest.ts#L32-L95 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/create/CreateRequest.ts#L32-L95 type Request = json.RawMessage + +// NewRequest returns a Request +func NewRequest() *Request { + r := new(json.RawMessage) + + return r +} diff --git a/typedapi/core/create/response.go b/typedapi/core/create/response.go index b42c51f248..76f4fa128c 100644 --- a/typedapi/core/create/response.go +++ b/typedapi/core/create/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package create @@ -27,14 +27,14 @@ import ( // Response holds the response body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/create/CreateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/create/CreateResponse.ts#L22-L24 type Response struct { ForcedRefresh *bool `json:"forced_refresh,omitempty"` Id_ string `json:"_id"` Index_ string `json:"_index"` - PrimaryTerm_ int64 `json:"_primary_term"` + PrimaryTerm_ *int64 `json:"_primary_term,omitempty"` Result result.Result `json:"result"` - SeqNo_ int64 `json:"_seq_no"` + SeqNo_ *int64 `json:"_seq_no,omitempty"` Shards_ types.ShardStatistics `json:"_shards"` Version_ int64 `json:"_version"` } diff --git a/typedapi/core/delete/delete.go b/typedapi/core/delete/delete.go index a09694019d..f31b19f645 100644 --- a/typedapi/core/delete/delete.go +++ b/typedapi/core/delete/delete.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Removes a document from the index. +// Removes a JSON document from the specified index. package delete import ( @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -84,7 +84,7 @@ func NewDeleteFunc(tp elastictransport.Interface) NewDelete { } } -// Removes a document from the index. +// Removes a JSON document from the specified index. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete.html func New(tp elastictransport.Interface) *Delete { @@ -238,7 +238,7 @@ func (r Delete) Do(providedCtx context.Context) (*Response, error) { } if res.StatusCode == 404 { - data, err := ioutil.ReadAll(res.Body) + data, err := io.ReadAll(res.Body) if err != nil { if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, err) @@ -310,7 +310,7 @@ func (r Delete) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -427,3 +427,47 @@ func (r *Delete) WaitForActiveShards(waitforactiveshards string) *Delete { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Delete) ErrorTrace(errortrace bool) *Delete { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Delete) FilterPath(filterpaths ...string) *Delete { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Delete) Human(human bool) *Delete { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Delete) Pretty(pretty bool) *Delete { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/delete/response.go b/typedapi/core/delete/response.go index c829be55da..931d02cf2e 100644 --- a/typedapi/core/delete/response.go +++ b/typedapi/core/delete/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package delete @@ -27,14 +27,14 @@ import ( // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/delete/DeleteResponse.ts#L22-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/delete/DeleteResponse.ts#L22-L34 type Response struct { ForcedRefresh *bool `json:"forced_refresh,omitempty"` Id_ string `json:"_id"` Index_ string `json:"_index"` - PrimaryTerm_ int64 `json:"_primary_term"` + PrimaryTerm_ *int64 `json:"_primary_term,omitempty"` Result result.Result `json:"result"` - SeqNo_ int64 `json:"_seq_no"` + SeqNo_ *int64 `json:"_seq_no,omitempty"` Shards_ types.ShardStatistics `json:"_shards"` Version_ int64 `json:"_version"` } diff --git a/typedapi/core/deletebyquery/delete_by_query.go b/typedapi/core/deletebyquery/delete_by_query.go index 34b0119ee7..89937626a3 100644 --- a/typedapi/core/deletebyquery/delete_by_query.go +++ b/typedapi/core/deletebyquery/delete_by_query.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes documents matching the provided query. +// Deletes documents that match the specified query. package deletebyquery import ( @@ -85,7 +85,7 @@ func NewDeleteByQueryFunc(tp elastictransport.Interface) NewDeleteByQuery { } } -// Deletes documents matching the provided query. +// Deletes documents that match the specified query. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-delete-by-query.html func New(tp elastictransport.Interface) *DeleteByQuery { @@ -581,6 +581,50 @@ func (r *DeleteByQuery) WaitForCompletion(waitforcompletion bool) *DeleteByQuery return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteByQuery) ErrorTrace(errortrace bool) *DeleteByQuery { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteByQuery) FilterPath(filterpaths ...string) *DeleteByQuery { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteByQuery) Human(human bool) *DeleteByQuery { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *DeleteByQuery) Pretty(pretty bool) *DeleteByQuery { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // MaxDocs The maximum number of documents to delete. // API name: max_docs func (r *DeleteByQuery) MaxDocs(maxdocs int64) *DeleteByQuery { diff --git a/typedapi/core/deletebyquery/request.go b/typedapi/core/deletebyquery/request.go index 2899245c71..4b80a0201d 100644 --- a/typedapi/core/deletebyquery/request.go +++ b/typedapi/core/deletebyquery/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletebyquery @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package deletebyquery // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/delete_by_query/DeleteByQueryRequest.ts#L36-L209 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/delete_by_query/DeleteByQueryRequest.ts#L36-L209 type Request struct { // MaxDocs The maximum number of documents to delete. @@ -44,6 +44,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/deletebyquery/response.go b/typedapi/core/deletebyquery/response.go index 9717b76324..fdbf9be0ab 100644 --- a/typedapi/core/deletebyquery/response.go +++ b/typedapi/core/deletebyquery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletebyquery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deletebyquery // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/delete_by_query/DeleteByQueryResponse.ts#L26-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/delete_by_query/DeleteByQueryResponse.ts#L26-L45 type Response struct { Batches *int64 `json:"batches,omitempty"` Deleted *int64 `json:"deleted,omitempty"` diff --git a/typedapi/core/deletebyqueryrethrottle/delete_by_query_rethrottle.go b/typedapi/core/deletebyqueryrethrottle/delete_by_query_rethrottle.go index 8675741483..ff5de3534c 100644 --- a/typedapi/core/deletebyqueryrethrottle/delete_by_query_rethrottle.go +++ b/typedapi/core/deletebyqueryrethrottle/delete_by_query_rethrottle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Changes the number of requests per second for a particular Delete By Query // operation. 
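Because _seq_no and _primary_term become optional pointers in the create and delete responses above, callers that logged them directly need a nil check. A small sketch against the delete response type:

import (
	"fmt"

	coredelete "github.com/elastic/go-elasticsearch/v8/typedapi/core/delete"
)

// reportDelete prints the outcome of a delete; SeqNo_ and PrimaryTerm_ are
// only dereferenced when the server actually returned them.
func reportDelete(res *coredelete.Response) {
	fmt.Printf("result=%v", res.Result)
	if res.SeqNo_ != nil && res.PrimaryTerm_ != nil {
		fmt.Printf(" seq_no=%d primary_term=%d", *res.SeqNo_, *res.PrimaryTerm_)
	}
	fmt.Println()
}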
@@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r DeleteByQueryRethrottle) IsSuccess(providedCtx context.Context) (bool, e if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -308,3 +308,47 @@ func (r *DeleteByQueryRethrottle) RequestsPerSecond(requestspersecond string) *D return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteByQueryRethrottle) ErrorTrace(errortrace bool) *DeleteByQueryRethrottle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteByQueryRethrottle) FilterPath(filterpaths ...string) *DeleteByQueryRethrottle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteByQueryRethrottle) Human(human bool) *DeleteByQueryRethrottle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteByQueryRethrottle) Pretty(pretty bool) *DeleteByQueryRethrottle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/deletebyqueryrethrottle/response.go b/typedapi/core/deletebyqueryrethrottle/response.go index 5fce45ee85..008f791113 100644 --- a/typedapi/core/deletebyqueryrethrottle/response.go +++ b/typedapi/core/deletebyqueryrethrottle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletebyqueryrethrottle @@ -32,7 +32,7 @@ import ( // Response holds the response body struct for the package deletebyqueryrethrottle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/delete_by_query_rethrottle/DeleteByQueryRethrottleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/delete_by_query_rethrottle/DeleteByQueryRethrottleResponse.ts#L22-L24 type Response struct { NodeFailures []types.ErrorCause `json:"node_failures,omitempty"` // Nodes Task information grouped by node, if `group_by` was set to `node` (the diff --git a/typedapi/core/deletescript/delete_script.go b/typedapi/core/deletescript/delete_script.go index 91879c595e..73f029961c 100644 --- a/typedapi/core/deletescript/delete_script.go +++ b/typedapi/core/deletescript/delete_script.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes a script. +// Deletes a stored script or search template. package deletescript import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +76,7 @@ func NewDeleteScriptFunc(tp elastictransport.Interface) NewDeleteScript { } } -// Deletes a script. +// Deletes a stored script or search template. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html func New(tp elastictransport.Interface) *DeleteScript { @@ -260,7 +260,7 @@ func (r DeleteScript) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -316,3 +316,47 @@ func (r *DeleteScript) Timeout(duration string) *DeleteScript { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteScript) ErrorTrace(errortrace bool) *DeleteScript { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteScript) FilterPath(filterpaths ...string) *DeleteScript { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *DeleteScript) Human(human bool) *DeleteScript { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteScript) Pretty(pretty bool) *DeleteScript { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/deletescript/response.go b/typedapi/core/deletescript/response.go index aac4210628..d3329b656f 100644 --- a/typedapi/core/deletescript/response.go +++ b/typedapi/core/deletescript/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletescript // Response holds the response body struct for the package deletescript // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/delete_script/DeleteScriptResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/delete_script/DeleteScriptResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/core/exists/exists.go b/typedapi/core/exists/exists.go index 359c5b2998..63da73e7e0 100644 --- a/typedapi/core/exists/exists.go +++ b/typedapi/core/exists/exists.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about whether a document exists in an index. +// Checks if a document in an index exists. package exists import ( @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -81,7 +80,7 @@ func NewExistsFunc(tp elastictransport.Interface) NewExists { } } -// Returns information about whether a document exists in an index. +// Checks if a document in an index exists. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-get.html func New(tp elastictransport.Interface) *Exists { @@ -222,7 +221,7 @@ func (r Exists) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -355,3 +354,47 @@ func (r *Exists) VersionType(versiontype versiontype.VersionType) *Exists { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Exists) ErrorTrace(errortrace bool) *Exists { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Exists) FilterPath(filterpaths ...string) *Exists { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Exists) Human(human bool) *Exists { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Exists) Pretty(pretty bool) *Exists { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/existssource/exists_source.go b/typedapi/core/existssource/exists_source.go index f78d9f6a58..1d8bf79b5b 100644 --- a/typedapi/core/existssource/exists_source.go +++ b/typedapi/core/existssource/exists_source.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about whether a document source exists in an index. +// Checks if a document's `_source` is stored. package existssource import ( @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -81,7 +80,7 @@ func NewExistsSourceFunc(tp elastictransport.Interface) NewExistsSource { } } -// Returns information about whether a document source exists in an index. +// Checks if a document's `_source` is stored. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-get.html func New(tp elastictransport.Interface) *ExistsSource { @@ -222,7 +221,7 @@ func (r ExistsSource) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -345,3 +344,47 @@ func (r *ExistsSource) VersionType(versiontype versiontype.VersionType) *ExistsS return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ExistsSource) ErrorTrace(errortrace bool) *ExistsSource { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ExistsSource) FilterPath(filterpaths ...string) *ExistsSource { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *ExistsSource) Human(human bool) *ExistsSource { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ExistsSource) Pretty(pretty bool) *ExistsSource { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/explain/explain.go b/typedapi/core/explain/explain.go index f5c8aa7c84..874a93157b 100644 --- a/typedapi/core/explain/explain.go +++ b/typedapi/core/explain/explain.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about why a specific matches (or doesn't match) a query. +// Returns information about why a specific document matches (or doesn’t match) +// a query. package explain import ( @@ -87,7 +88,8 @@ func NewExplainFunc(tp elastictransport.Interface) NewExplain { } } -// Returns information about why a specific matches (or doesn't match) a query. +// Returns information about why a specific document matches (or doesn’t match) +// a query. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-explain.html func New(tp elastictransport.Interface) *Explain { @@ -436,6 +438,50 @@ func (r *Explain) Q(q string) *Explain { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Explain) ErrorTrace(errortrace bool) *Explain { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Explain) FilterPath(filterpaths ...string) *Explain { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Explain) Human(human bool) *Explain { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Explain) Pretty(pretty bool) *Explain { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Query Defines the search definition using the Query DSL. // API name: query func (r *Explain) Query(query *types.Query) *Explain { diff --git a/typedapi/core/explain/request.go b/typedapi/core/explain/request.go index 40928a5b36..ca27035580 100644 --- a/typedapi/core/explain/request.go +++ b/typedapi/core/explain/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
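Explain keeps its Q helper for Lucene query strings alongside the new options. A sketch, assuming an *explain.Explain builder whose index and document ID are already set (that part of the API is unchanged and not shown here) and the usual generated Do signature:

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8/typedapi/core/explain"
)

// explainMatch asks why the targeted document does or does not match a Lucene
// query string; Pretty is only useful while debugging by hand.
func explainMatch(ctx context.Context, ex *explain.Explain) (*explain.Response, error) {
	return ex.
		Q("title:example").
		Pretty(true).
		Do(ctx)
}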
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package explain @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package explain // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/explain/ExplainRequest.ts#L26-L105 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/explain/ExplainRequest.ts#L26-L105 type Request struct { // Query Defines the search definition using the Query DSL. @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/explain/response.go b/typedapi/core/explain/response.go index 7ad2eb183e..34ac8d9de2 100644 --- a/typedapi/core/explain/response.go +++ b/typedapi/core/explain/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package explain @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package explain // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/explain/ExplainResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/explain/ExplainResponse.ts#L23-L31 type Response struct { Explanation *types.ExplanationDetail `json:"explanation,omitempty"` Get *types.InlineGet `json:"get,omitempty"` diff --git a/typedapi/core/fieldcaps/field_caps.go b/typedapi/core/fieldcaps/field_caps.go index c9fcb54850..f2db7710da 100644 --- a/typedapi/core/fieldcaps/field_caps.go +++ b/typedapi/core/fieldcaps/field_caps.go @@ -16,10 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Returns the information about the capabilities of fields among multiple -// indices. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// The field capabilities API returns the information about the capabilities of +// fields among multiple indices. +// The field capabilities API returns runtime fields like any other field. For +// example, a runtime field with a type +// of keyword is returned as any other field that belongs to the `keyword` +// family. package fieldcaps import ( @@ -81,8 +85,12 @@ func NewFieldCapsFunc(tp elastictransport.Interface) NewFieldCaps { } } -// Returns the information about the capabilities of fields among multiple -// indices. +// The field capabilities API returns the information about the capabilities of +// fields among multiple indices. +// The field capabilities API returns runtime fields like any other field. For +// example, a runtime field with a type +// of keyword is returned as any other field that belongs to the `keyword` +// family. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-field-caps.html func New(tp elastictransport.Interface) *FieldCaps { @@ -393,6 +401,50 @@ func (r *FieldCaps) IncludeEmptyFields(includeemptyfields bool) *FieldCaps { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *FieldCaps) ErrorTrace(errortrace bool) *FieldCaps { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *FieldCaps) FilterPath(filterpaths ...string) *FieldCaps { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *FieldCaps) Human(human bool) *FieldCaps { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *FieldCaps) Pretty(pretty bool) *FieldCaps { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Fields List of fields to retrieve capabilities for. Wildcard (`*`) expressions are // supported. // API name: fields diff --git a/typedapi/core/fieldcaps/request.go b/typedapi/core/fieldcaps/request.go index cac6b8b507..b38c530286 100644 --- a/typedapi/core/fieldcaps/request.go +++ b/typedapi/core/fieldcaps/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package fieldcaps @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package fieldcaps // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/field_caps/FieldCapabilitiesRequest.ts#L25-L106 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/field_caps/FieldCapabilitiesRequest.ts#L25-L106 type Request struct { // Fields List of fields to retrieve capabilities for. Wildcard (`*`) expressions are @@ -51,6 +51,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/fieldcaps/response.go b/typedapi/core/fieldcaps/response.go index e58d415030..13cc0cbf8b 100644 --- a/typedapi/core/fieldcaps/response.go +++ b/typedapi/core/fieldcaps/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
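A sketch of how the field capabilities builder above might be driven once these parameters are in place. It assumes an *elasticsearch.TypedClient named es (created as in the earlier sketch) plus the context and fmt imports; the index and field patterns are placeholders, and the response shape follows the map[string]map[string]types.FieldCapability structure in fieldcaps/response.go.

// fieldCapsExample is illustrative only; Fields and FilterPath are the
// generated setters shown above.
func fieldCapsExample(es *elasticsearch.TypedClient) error {
	caps, err := es.Core.FieldCaps().
		Index("my-index").
		Fields("title", "user.*"). // wildcard patterns are supported
		FilterPath("fields").      // trim the response to the capability map
		Do(context.Background())
	if err != nil {
		return err
	}
	for field, byType := range caps.Fields {
		for typ, c := range byType {
			fmt.Printf("%s (%s): searchable=%t aggregatable=%t\n",
				field, typ, c.Searchable, c.Aggregatable)
		}
	}
	return nil
}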
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package fieldcaps @@ -32,7 +32,7 @@ import ( // Response holds the response body struct for the package fieldcaps // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/field_caps/FieldCapabilitiesResponse.ts#L24-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/field_caps/FieldCapabilitiesResponse.ts#L24-L35 type Response struct { Fields map[string]map[string]types.FieldCapability `json:"fields"` Indices []string `json:"indices"` diff --git a/typedapi/core/get/get.go b/typedapi/core/get/get.go index deb5c01921..a4995b6f50 100644 --- a/typedapi/core/get/get.go +++ b/typedapi/core/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns a document. package get @@ -28,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -238,7 +237,7 @@ func (r Get) Do(providedCtx context.Context) (*Response, error) { } if res.StatusCode == 404 { - data, err := ioutil.ReadAll(res.Body) + data, err := io.ReadAll(res.Body) if err != nil { if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, err) @@ -310,7 +309,7 @@ func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -356,6 +355,18 @@ func (r *Get) _index(index string) *Get { return r } +// ForceSyntheticSource Should this request force synthetic _source? +// Use this to test if the mapping supports synthetic _source and to get a sense +// of the worst case performance. +// Fetches with this enabled will be slower the enabling synthetic source +// natively in the index. +// API name: force_synthetic_source +func (r *Get) ForceSyntheticSource(forcesyntheticsource bool) *Get { + r.values.Set("force_synthetic_source", strconv.FormatBool(forcesyntheticsource)) + + return r +} + // Preference Specifies the node or shard the operation should be performed on. Random by // default. // API name: preference @@ -441,3 +452,47 @@ func (r *Get) VersionType(versiontype versiontype.VersionType) *Get { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Get) ErrorTrace(errortrace bool) *Get { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Get) FilterPath(filterpaths ...string) *Get { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
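The force_synthetic_source parameter added to Get above could be exercised as in the following sketch, again assuming the typed client from the first example plus the encoding/json import; the index, id and document shape are placeholders, while Found and Source_ come from the generated get response.

// getWithSyntheticSource is illustrative only.
func getWithSyntheticSource(es *elasticsearch.TypedClient) error {
	res, err := es.Core.Get("my-index", "1").
		ForceSyntheticSource(true). // check how the mapping behaves with synthetic _source
		Do(context.Background())
	if err != nil {
		return err
	}
	if !res.Found {
		return fmt.Errorf("document not found")
	}
	var doc map[string]any
	if err := json.Unmarshal(res.Source_, &doc); err != nil {
		return err
	}
	fmt.Println(doc)
	return nil
}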
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Get) Human(human bool) *Get { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Get) Pretty(pretty bool) *Get { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/get/response.go b/typedapi/core/get/response.go index b4a9939c2a..12633d6544 100644 --- a/typedapi/core/get/response.go +++ b/typedapi/core/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/get/GetResponse.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/get/GetResponse.ts#L23-L34 type Response struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` diff --git a/typedapi/core/getscript/get_script.go b/typedapi/core/getscript/get_script.go index 2ed1ff2590..2e33b01bca 100644 --- a/typedapi/core/getscript/get_script.go +++ b/typedapi/core/getscript/get_script.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns a script. +// Retrieves a stored script or search template. package getscript import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +76,7 @@ func NewGetScriptFunc(tp elastictransport.Interface) NewGetScript { } } -// Returns a script. +// Retrieves a stored script or search template. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html func New(tp elastictransport.Interface) *GetScript { @@ -260,7 +260,7 @@ func (r GetScript) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -304,3 +304,47 @@ func (r *GetScript) MasterTimeout(duration string) *GetScript { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetScript) ErrorTrace(errortrace bool) *GetScript { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *GetScript) FilterPath(filterpaths ...string) *GetScript { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetScript) Human(human bool) *GetScript { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetScript) Pretty(pretty bool) *GetScript { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/getscript/response.go b/typedapi/core/getscript/response.go index e279377f73..803222e725 100644 --- a/typedapi/core/getscript/response.go +++ b/typedapi/core/getscript/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getscript @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getscript // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/get_script/GetScriptResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/get_script/GetScriptResponse.ts#L23-L29 type Response struct { Found bool `json:"found"` Id_ string `json:"_id"` diff --git a/typedapi/core/getscriptcontext/get_script_context.go b/typedapi/core/getscriptcontext/get_script_context.go index 46a42105c3..7c9019f429 100644 --- a/typedapi/core/getscriptcontext/get_script_context.go +++ b/typedapi/core/getscriptcontext/get_script_context.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns all script contexts. package getscriptcontext @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -246,7 +246,7 @@ func (r GetScriptContext) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -273,3 +273,47 @@ func (r *GetScriptContext) Header(key, value string) *GetScriptContext { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *GetScriptContext) ErrorTrace(errortrace bool) *GetScriptContext { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetScriptContext) FilterPath(filterpaths ...string) *GetScriptContext { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetScriptContext) Human(human bool) *GetScriptContext { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetScriptContext) Pretty(pretty bool) *GetScriptContext { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/getscriptcontext/response.go b/typedapi/core/getscriptcontext/response.go index 8f1e4e783e..8cbaacfd23 100644 --- a/typedapi/core/getscriptcontext/response.go +++ b/typedapi/core/getscriptcontext/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getscriptcontext @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getscriptcontext // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/get_script_context/GetScriptContextResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/get_script_context/GetScriptContextResponse.ts#L22-L26 type Response struct { Contexts []types.GetScriptContext `json:"contexts"` } diff --git a/typedapi/core/getscriptlanguages/get_script_languages.go b/typedapi/core/getscriptlanguages/get_script_languages.go index ecc193e8ac..ff67251681 100644 --- a/typedapi/core/getscriptlanguages/get_script_languages.go +++ b/typedapi/core/getscriptlanguages/get_script_languages.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns available script types, languages and contexts package getscriptlanguages @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -246,7 +246,7 @@ func (r GetScriptLanguages) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -273,3 +273,47 @@ func (r *GetScriptLanguages) Header(key, value string) *GetScriptLanguages { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetScriptLanguages) ErrorTrace(errortrace bool) *GetScriptLanguages { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetScriptLanguages) FilterPath(filterpaths ...string) *GetScriptLanguages { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetScriptLanguages) Human(human bool) *GetScriptLanguages { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetScriptLanguages) Pretty(pretty bool) *GetScriptLanguages { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/getscriptlanguages/response.go b/typedapi/core/getscriptlanguages/response.go index bfbe54aa6c..d05515e60d 100644 --- a/typedapi/core/getscriptlanguages/response.go +++ b/typedapi/core/getscriptlanguages/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getscriptlanguages @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getscriptlanguages // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/get_script_languages/GetScriptLanguagesResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/get_script_languages/GetScriptLanguagesResponse.ts#L22-L27 type Response struct { LanguageContexts []types.LanguageContext `json:"language_contexts"` TypesAllowed []string `json:"types_allowed"` diff --git a/typedapi/core/getsource/get_source.go b/typedapi/core/getsource/get_source.go index 1234a29ded..86644cbc86 100644 --- a/typedapi/core/getsource/get_source.go +++ b/typedapi/core/getsource/get_source.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns the source of a document. package getsource @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -273,7 +272,7 @@ func (r GetSource) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -401,3 +400,47 @@ func (r *GetSource) VersionType(versiontype versiontype.VersionType) *GetSource return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetSource) ErrorTrace(errortrace bool) *GetSource { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetSource) FilterPath(filterpaths ...string) *GetSource { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetSource) Human(human bool) *GetSource { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetSource) Pretty(pretty bool) *GetSource { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/getsource/response.go b/typedapi/core/getsource/response.go index 1d341b0f29..418a1e3b8a 100644 --- a/typedapi/core/getsource/response.go +++ b/typedapi/core/getsource/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getsource @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getsource // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/get_source/SourceResponse.ts#L20-L23 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/get_source/SourceResponse.ts#L20-L23 type Response = json.RawMessage diff --git a/typedapi/core/healthreport/health_report.go b/typedapi/core/healthreport/health_report.go index 1b373e4bed..b4c8e75af6 100644 --- a/typedapi/core/healthreport/health_report.go +++ b/typedapi/core/healthreport/health_report.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns the health of the cluster. package healthreport @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -264,7 +263,7 @@ func (r HealthReport) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -324,3 +323,47 @@ func (r *HealthReport) Size(size int) *HealthReport { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *HealthReport) ErrorTrace(errortrace bool) *HealthReport { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *HealthReport) FilterPath(filterpaths ...string) *HealthReport { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *HealthReport) Human(human bool) *HealthReport { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *HealthReport) Pretty(pretty bool) *HealthReport { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/healthreport/response.go b/typedapi/core/healthreport/response.go index 4677ed9128..49db947e59 100644 --- a/typedapi/core/healthreport/response.go +++ b/typedapi/core/healthreport/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package healthreport @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package healthreport // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/Response.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/Response.ts#L22-L28 type Response struct { ClusterName string `json:"cluster_name"` Indicators types.Indicators `json:"indicators"` diff --git a/typedapi/core/index/index.go b/typedapi/core/index/index.go index 6d35b41e1b..f133ec0b6f 100644 --- a/typedapi/core/index/index.go +++ b/typedapi/core/index/index.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates or updates a document in an index. +// Adds a JSON document to the specified data stream or index and makes it +// searchable. +// If the target is an index and the document already exists, the request +// updates the document and increments its version. package index import ( @@ -58,8 +61,8 @@ type Index struct { raw io.Reader - req interface{} - deferred []func(request interface{}) error + req any + deferred []func(request any) error buf *gobytes.Buffer paramSet int @@ -87,7 +90,10 @@ func NewIndexFunc(tp elastictransport.Interface) NewIndex { } } -// Creates or updates a document in an index. +// Adds a JSON document to the specified data stream or index and makes it +// searchable. +// If the target is an index and the document already exists, the request +// updates the document and increments its version. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-index_.html func New(tp elastictransport.Interface) *Index { @@ -97,6 +103,8 @@ func New(tp elastictransport.Interface) *Index { headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -117,14 +125,14 @@ func (r *Index) Raw(raw io.Reader) *Index { } // Request allows to set the request property with the appropriate payload. -func (r *Index) Request(req interface{}) *Index { +func (r *Index) Request(req any) *Index { r.req = req return r } // Document allows to set the request property with the appropriate payload. -func (r *Index) Document(document interface{}) *Index { +func (r *Index) Document(document any) *Index { r.req = document return r @@ -457,3 +465,47 @@ func (r *Index) RequireAlias(requirealias bool) *Index { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Index) ErrorTrace(errortrace bool) *Index { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Index) FilterPath(filterpaths ...string) *Index { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Index) Human(human bool) *Index { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Index) Pretty(pretty bool) *Index { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/index/request.go b/typedapi/core/index/request.go index ce515c61e6..08adc7d051 100644 --- a/typedapi/core/index/request.go +++ b/typedapi/core/index/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package index @@ -26,5 +26,12 @@ import ( // Request holds the request body struct for the package index // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/index/IndexRequest.ts#L35-L117 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/index/IndexRequest.ts#L35-L117 type Request = json.RawMessage + +// NewRequest returns a Request +func NewRequest() *Request { + r := new(json.RawMessage) + + return r +} diff --git a/typedapi/core/index/response.go b/typedapi/core/index/response.go index f56f3db9af..3fb5c35117 100644 --- a/typedapi/core/index/response.go +++ b/typedapi/core/index/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
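Since the index builder now pre-populates its request via NewRequest and accepts any document payload, a typical call might look like this sketch; the article struct, index name and id are placeholders, and es is the typed client from the first example.

// indexDocumentExample is illustrative only; Document serializes the value as
// the request body and Do sends the request.
type article struct {
	Title string `json:"title"`
	Views int    `json:"views"`
}

func indexDocumentExample(es *elasticsearch.TypedClient) error {
	res, err := es.Index("my-index").
		Id("1").
		Document(article{Title: "hello world", Views: 3}).
		Do(context.Background())
	if err != nil {
		return err
	}
	fmt.Println("result:", res.Result, "version:", res.Version_)
	return nil
}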
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package index @@ -27,14 +27,14 @@ import ( // Response holds the response body struct for the package index // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/index/IndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/index/IndexResponse.ts#L22-L24 type Response struct { ForcedRefresh *bool `json:"forced_refresh,omitempty"` Id_ string `json:"_id"` Index_ string `json:"_index"` - PrimaryTerm_ int64 `json:"_primary_term"` + PrimaryTerm_ *int64 `json:"_primary_term,omitempty"` Result result.Result `json:"result"` - SeqNo_ int64 `json:"_seq_no"` + SeqNo_ *int64 `json:"_seq_no,omitempty"` Shards_ types.ShardStatistics `json:"_shards"` Version_ int64 `json:"_version"` } diff --git a/typedapi/core/info/info.go b/typedapi/core/info/info.go index 7bb916cb3e..27fe07d2b5 100644 --- a/typedapi/core/info/info.go +++ b/typedapi/core/info/info.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns basic information about the cluster. package info @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -244,7 +244,7 @@ func (r Info) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -271,3 +271,47 @@ func (r *Info) Header(key, value string) *Info { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Info) ErrorTrace(errortrace bool) *Info { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Info) FilterPath(filterpaths ...string) *Info { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Info) Human(human bool) *Info { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
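Because _seq_no and _primary_term become optional pointers in the index response above, callers that feed them into optimistic-concurrency updates should guard against nil first. Continuing the previous sketch:

// res is the *index.Response returned by the previous sketch.
if res.SeqNo_ != nil && res.PrimaryTerm_ != nil {
	fmt.Printf("indexed %s: seq_no=%d primary_term=%d\n",
		res.Id_, *res.SeqNo_, *res.PrimaryTerm_)
} else {
	fmt.Printf("indexed %s: no sequence metadata returned\n", res.Id_)
}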
+// API name: pretty +func (r *Info) Pretty(pretty bool) *Info { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/info/response.go b/typedapi/core/info/response.go index efb34b34a9..5764e56c72 100644 --- a/typedapi/core/info/response.go +++ b/typedapi/core/info/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/info/RootNodeInfoResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/info/RootNodeInfoResponse.ts#L23-L31 type Response struct { ClusterName string `json:"cluster_name"` ClusterUuid string `json:"cluster_uuid"` diff --git a/typedapi/core/knnsearch/knn_search.go b/typedapi/core/knnsearch/knn_search.go index 1713275a60..1b0eefd31c 100644 --- a/typedapi/core/knnsearch/knn_search.go +++ b/typedapi/core/knnsearch/knn_search.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Performs a kNN search. package knnsearch @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -321,6 +322,50 @@ func (r *KnnSearch) Routing(routing string) *KnnSearch { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *KnnSearch) ErrorTrace(errortrace bool) *KnnSearch { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *KnnSearch) FilterPath(filterpaths ...string) *KnnSearch { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *KnnSearch) Human(human bool) *KnnSearch { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *KnnSearch) Pretty(pretty bool) *KnnSearch { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // DocvalueFields The request returns doc values for field names matching these patterns // in the hits.fields property of the response. Accepts wildcard (*) patterns. 
// API name: docvalue_fields diff --git a/typedapi/core/knnsearch/request.go b/typedapi/core/knnsearch/request.go index d1a63b2f31..b09a540b85 100644 --- a/typedapi/core/knnsearch/request.go +++ b/typedapi/core/knnsearch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package knnsearch @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package knnsearch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/knn_search/KnnSearchRequest.ts#L27-L80 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/knn_search/KnnSearchRequest.ts#L27-L80 type Request struct { // DocvalueFields The request returns doc values for field names matching these patterns @@ -65,6 +65,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/knnsearch/response.go b/typedapi/core/knnsearch/response.go index 1a2ba750c6..d84f879a62 100644 --- a/typedapi/core/knnsearch/response.go +++ b/typedapi/core/knnsearch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package knnsearch @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package knnsearch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/knn_search/KnnSearchResponse.ts#L26-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/knn_search/KnnSearchResponse.ts#L26-L54 type Response struct { // Fields Contains field values for the documents. These fields diff --git a/typedapi/core/mget/mget.go b/typedapi/core/mget/mget.go index 551d60ecc2..7c747e882d 100644 --- a/typedapi/core/mget/mget.go +++ b/typedapi/core/mget/mget.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Allows to get multiple documents in one request. package mget @@ -317,6 +317,18 @@ func (r *Mget) Index(index string) *Mget { return r } +// ForceSyntheticSource Should this request force synthetic _source? +// Use this to test if the mapping supports synthetic _source and to get a sense +// of the worst case performance. +// Fetches with this enabled will be slower the enabling synthetic source +// natively in the index. +// API name: force_synthetic_source +func (r *Mget) ForceSyntheticSource(forcesyntheticsource bool) *Mget { + r.values.Set("force_synthetic_source", strconv.FormatBool(forcesyntheticsource)) + + return r +} + // Preference Specifies the node or shard the operation should be performed on. Random by // default. 
// API name: preference @@ -390,6 +402,50 @@ func (r *Mget) StoredFields(fields ...string) *Mget { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Mget) ErrorTrace(errortrace bool) *Mget { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Mget) FilterPath(filterpaths ...string) *Mget { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Mget) Human(human bool) *Mget { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Mget) Pretty(pretty bool) *Mget { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Docs The documents you want to retrieve. Required if no index is specified in the // request URI. // API name: docs diff --git a/typedapi/core/mget/request.go b/typedapi/core/mget/request.go index faddfd869a..12de1a8115 100644 --- a/typedapi/core/mget/request.go +++ b/typedapi/core/mget/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package mget @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package mget // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/mget/MultiGetRequest.ts#L25-L91 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/mget/MultiGetRequest.ts#L25-L98 type Request struct { // Docs The documents you want to retrieve. Required if no index is specified in the @@ -46,6 +46,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/mget/response.go b/typedapi/core/mget/response.go index 11a3a594d0..a7b92211b8 100644 --- a/typedapi/core/mget/response.go +++ b/typedapi/core/mget/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package mget @@ -32,7 +32,7 @@ import ( // Response holds the response body struct for the package mget // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/mget/MultiGetResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/mget/MultiGetResponse.ts#L22-L26 type Response struct { Docs []types.MgetResponseItem `json:"docs"` } diff --git a/typedapi/core/msearch/msearch.go b/typedapi/core/msearch/msearch.go index cb49fb8961..1abc03c6f1 100644 --- a/typedapi/core/msearch/msearch.go +++ b/typedapi/core/msearch/msearch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Allows to execute several search operations in one request. package msearch @@ -438,3 +438,47 @@ func (r *Msearch) TypedKeys(typedkeys bool) *Msearch { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Msearch) ErrorTrace(errortrace bool) *Msearch { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Msearch) FilterPath(filterpaths ...string) *Msearch { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Msearch) Human(human bool) *Msearch { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Msearch) Pretty(pretty bool) *Msearch { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/msearch/request.go b/typedapi/core/msearch/request.go index f002762d7c..ec34bbe5fd 100644 --- a/typedapi/core/msearch/request.go +++ b/typedapi/core/msearch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package msearch @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package msearch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/msearch/MultiSearchRequest.ts#L25-L96 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/msearch/MultiSearchRequest.ts#L25-L96 type Request = []types.MsearchRequestItem diff --git a/typedapi/core/msearch/response.go b/typedapi/core/msearch/response.go index b83c758e05..29cd85a202 100644 --- a/typedapi/core/msearch/response.go +++ b/typedapi/core/msearch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package msearch @@ -33,7 +33,7 @@ import ( // Response holds the response body struct for the package msearch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/msearch/MultiSearchResponse.ts#L25-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/msearch/MultiSearchResponse.ts#L25-L27 type Response struct { Responses []types.MsearchResponseItem `json:"responses"` Took int64 `json:"took"` @@ -101,7 +101,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/core/msearchtemplate/msearch_template.go b/typedapi/core/msearchtemplate/msearch_template.go index 8b4e139b2e..77449308c0 100644 --- a/typedapi/core/msearchtemplate/msearch_template.go +++ b/typedapi/core/msearchtemplate/msearch_template.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Allows to execute several search template operations in one request. +// Runs multiple templated searches with a single request. package msearchtemplate import ( @@ -80,7 +80,7 @@ func NewMsearchTemplateFunc(tp elastictransport.Interface) NewMsearchTemplate { } } -// Allows to execute several search template operations in one request. +// Runs multiple templated searches with a single request. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html func New(tp elastictransport.Interface) *MsearchTemplate { @@ -371,3 +371,47 @@ func (r *MsearchTemplate) TypedKeys(typedkeys bool) *MsearchTemplate { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *MsearchTemplate) ErrorTrace(errortrace bool) *MsearchTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *MsearchTemplate) FilterPath(filterpaths ...string) *MsearchTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *MsearchTemplate) Human(human bool) *MsearchTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *MsearchTemplate) Pretty(pretty bool) *MsearchTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/msearchtemplate/request.go b/typedapi/core/msearchtemplate/request.go index 6a0e705b5d..947621f35b 100644 --- a/typedapi/core/msearchtemplate/request.go +++ b/typedapi/core/msearchtemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package msearchtemplate @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package msearchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/msearch_template/MultiSearchTemplateRequest.ts#L25-L70 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/msearch_template/MultiSearchTemplateRequest.ts#L25-L70 type Request = []types.RequestItem diff --git a/typedapi/core/msearchtemplate/response.go b/typedapi/core/msearchtemplate/response.go index 30d50d7afe..bf5d88140b 100644 --- a/typedapi/core/msearchtemplate/response.go +++ b/typedapi/core/msearchtemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package msearchtemplate @@ -33,7 +33,7 @@ import ( // Response holds the response body struct for the package msearchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/msearch_template/MultiSearchTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/msearch_template/MultiSearchTemplateResponse.ts#L22-L24 type Response struct { Responses []types.MsearchResponseItem `json:"responses"` Took int64 `json:"took"` @@ -101,7 +101,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/core/mtermvectors/mtermvectors.go b/typedapi/core/mtermvectors/mtermvectors.go index 5822d07159..ff6c78b987 100644 --- a/typedapi/core/mtermvectors/mtermvectors.go +++ b/typedapi/core/mtermvectors/mtermvectors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns multiple termvectors in one request. package mtermvectors @@ -410,6 +410,50 @@ func (r *Mtermvectors) VersionType(versiontype versiontype.VersionType) *Mtermve return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Mtermvectors) ErrorTrace(errortrace bool) *Mtermvectors { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Mtermvectors) FilterPath(filterpaths ...string) *Mtermvectors { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Mtermvectors) Human(human bool) *Mtermvectors { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Mtermvectors) Pretty(pretty bool) *Mtermvectors { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Docs Array of existing or artificial documents. 
// API name: docs func (r *Mtermvectors) Docs(docs ...types.MTermVectorsOperation) *Mtermvectors { diff --git a/typedapi/core/mtermvectors/request.go b/typedapi/core/mtermvectors/request.go index 328736ed2e..71d17b6551 100644 --- a/typedapi/core/mtermvectors/request.go +++ b/typedapi/core/mtermvectors/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package mtermvectors @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package mtermvectors // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/mtermvectors/MultiTermVectorsRequest.ts#L31-L109 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/mtermvectors/MultiTermVectorsRequest.ts#L31-L109 type Request struct { // Docs Array of existing or artificial documents. @@ -42,6 +42,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/mtermvectors/response.go b/typedapi/core/mtermvectors/response.go index 8645c090d5..681725692d 100644 --- a/typedapi/core/mtermvectors/response.go +++ b/typedapi/core/mtermvectors/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package mtermvectors @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mtermvectors // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/mtermvectors/MultiTermVectorsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/mtermvectors/MultiTermVectorsResponse.ts#L22-L24 type Response struct { Docs []types.TermVectorsResult `json:"docs"` } diff --git a/typedapi/core/openpointintime/open_point_in_time.go b/typedapi/core/openpointintime/open_point_in_time.go index d65978a20e..aecfdd17a6 100644 --- a/typedapi/core/openpointintime/open_point_in_time.go +++ b/typedapi/core/openpointintime/open_point_in_time.go @@ -16,9 +16,19 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Open a point in time that can be used in subsequent searches +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// A search request by default executes against the most recent visible data of +// the target indices, +// which is called point in time. Elasticsearch pit (point in time) is a +// lightweight view into the +// state of the data as it existed when initiated. In some cases, it’s preferred +// to perform multiple +// search requests using the same point in time. 
For example, if refreshes +// happen between +// `search_after` requests, then the results of those requests might not be +// consistent as changes happening +// between searches are only visible to the more recent point in time. package openpointintime import ( @@ -27,7 +37,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -78,7 +87,17 @@ func NewOpenPointInTimeFunc(tp elastictransport.Interface) NewOpenPointInTime { } } -// Open a point in time that can be used in subsequent searches +// A search request by default executes against the most recent visible data of +// the target indices, +// which is called point in time. Elasticsearch pit (point in time) is a +// lightweight view into the +// state of the data as it existed when initiated. In some cases, it’s preferred +// to perform multiple +// search requests using the same point in time. For example, if refreshes +// happen between +// `search_after` requests, then the results of those requests might not be +// consistent as changes happening +// between searches are only visible to the more recent point in time. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/point-in-time-api.html func New(tp elastictransport.Interface) *OpenPointInTime { @@ -137,6 +156,12 @@ func (r *OpenPointInTime) HttpRequest(ctx context.Context) (*http.Request, error req.Header = r.headers.Clone() + if req.Header.Get("Content-Type") == "" { + if r.raw != nil { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + if req.Header.Get("Accept") == "" { req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") } @@ -262,7 +287,7 @@ func (r OpenPointInTime) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -349,3 +374,47 @@ func (r *OpenPointInTime) ExpandWildcards(expandwildcards ...expandwildcard.Expa return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *OpenPointInTime) ErrorTrace(errortrace bool) *OpenPointInTime { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *OpenPointInTime) FilterPath(filterpaths ...string) *OpenPointInTime { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *OpenPointInTime) Human(human bool) *OpenPointInTime { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *OpenPointInTime) Pretty(pretty bool) *OpenPointInTime { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/openpointintime/response.go b/typedapi/core/openpointintime/response.go index dca5af3e6d..b84e6cabd8 100644 --- a/typedapi/core/openpointintime/response.go +++ b/typedapi/core/openpointintime/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package openpointintime // Response holds the response body struct for the package openpointintime // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/open_point_in_time/OpenPointInTimeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/open_point_in_time/OpenPointInTimeResponse.ts#L22-L24 type Response struct { Id string `json:"id"` } diff --git a/typedapi/core/ping/ping.go b/typedapi/core/ping/ping.go index 9a58479a62..5f01a9cdc8 100644 --- a/typedapi/core/ping/ping.go +++ b/typedapi/core/ping/ping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns whether the cluster is running. package ping @@ -26,9 +26,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -193,7 +193,7 @@ func (r Ping) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -220,3 +220,47 @@ func (r *Ping) Header(key, value string) *Ping { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Ping) ErrorTrace(errortrace bool) *Ping { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Ping) FilterPath(filterpaths ...string) *Ping { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Ping) Human(human bool) *Ping { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
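// Illustrative sketch, not part of the generated code above: it follows the point-in-time
// workflow described in the new OpenPointInTime docstring — open a PIT against an index and
// reuse the returned id so that subsequent search_after pages see a consistent view of the
// data. The es.OpenPointInTime accessor, the KeepAlive setter and the client wiring are
// assumptions based on the typed-client conventions; the Response.Id field is taken from this
// diff.
package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		panic(err)
	}

	// Open the point in time; keep_alive bounds how long Elasticsearch retains it.
	pit, err := es.OpenPointInTime("my-index").
		KeepAlive("1m").
		Do(context.Background())
	if err != nil {
		panic(err)
	}

	// pit.Id goes into the body of follow-up searches ("pit": {"id": ..., "keep_alive": ...})
	// so that paging with search_after stays consistent across refreshes.
	fmt.Println("point in time id:", pit.Id)
}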
+// API name: pretty +func (r *Ping) Pretty(pretty bool) *Ping { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/putscript/put_script.go b/typedapi/core/putscript/put_script.go index e4aabc8492..7b55904f35 100644 --- a/typedapi/core/putscript/put_script.go +++ b/typedapi/core/putscript/put_script.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates or updates a script. +// Creates or updates a stored script or search template. package putscript import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -83,7 +84,7 @@ func NewPutScriptFunc(tp elastictransport.Interface) NewPutScript { } } -// Creates or updates a script. +// Creates or updates a stored script or search template. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting.html func New(tp elastictransport.Interface) *PutScript { @@ -364,6 +365,50 @@ func (r *PutScript) Timeout(duration string) *PutScript { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutScript) ErrorTrace(errortrace bool) *PutScript { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutScript) FilterPath(filterpaths ...string) *PutScript { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutScript) Human(human bool) *PutScript { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutScript) Pretty(pretty bool) *PutScript { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Script Contains the script or search template, its parameters, and its language. // API name: script func (r *PutScript) Script(script *types.StoredScript) *PutScript { diff --git a/typedapi/core/putscript/request.go b/typedapi/core/putscript/request.go index 71760ac6d1..4233af1121 100644 --- a/typedapi/core/putscript/request.go +++ b/typedapi/core/putscript/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
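// Illustrative sketch, not part of the generated code above: PutScript is now documented as
// storing either a script or a search template. Below, a Mustache search template is stored
// under an id so it can later be referenced by render_search_template or msearch_template.
// The Script() setter and the Acknowledged response field come from this diff; the StoredScript
// field names, the scriptlanguage enum import and the client wiring are assumptions based on
// the generated types.
package main

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/scriptlanguage"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		panic(err)
	}

	res, err := es.PutScript("my-search-template").
		Script(&types.StoredScript{
			Lang:   scriptlanguage.Mustache, // assumed enum value for the "mustache" language
			Source: `{"query":{"match":{"message":"{{query_string}}"}}}`,
		}).
		Pretty(true). // one of the query parameters added in this change
		Do(context.Background())
	if err != nil {
		panic(err)
	}
	_ = res.Acknowledged
}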
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putscript @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putscript // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/put_script/PutScriptRequest.ts#L25-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/put_script/PutScriptRequest.ts#L25-L64 type Request struct { // Script Contains the script or search template, its parameters, and its language. @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/putscript/response.go b/typedapi/core/putscript/response.go index 6ca91a46f9..2d2729640b 100644 --- a/typedapi/core/putscript/response.go +++ b/typedapi/core/putscript/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putscript // Response holds the response body struct for the package putscript // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/put_script/PutScriptResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/put_script/PutScriptResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/core/rankeval/rank_eval.go b/typedapi/core/rankeval/rank_eval.go index c2d18bb144..921ed369f8 100644 --- a/typedapi/core/rankeval/rank_eval.go +++ b/typedapi/core/rankeval/rank_eval.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Allows to evaluate the quality of ranked search results over a set of typical -// search queries +// Enables you to evaluate the quality of ranked search results over a set of +// typical search queries. package rankeval import ( @@ -81,8 +81,8 @@ func NewRankEvalFunc(tp elastictransport.Interface) NewRankEval { } } -// Allows to evaluate the quality of ranked search results over a set of typical -// search queries +// Enables you to evaluate the quality of ranked search results over a set of +// typical search queries. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-rank-eval.html func New(tp elastictransport.Interface) *RankEval { @@ -363,6 +363,50 @@ func (r *RankEval) SearchType(searchtype string) *RankEval { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *RankEval) ErrorTrace(errortrace bool) *RankEval { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *RankEval) FilterPath(filterpaths ...string) *RankEval { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *RankEval) Human(human bool) *RankEval { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *RankEval) Pretty(pretty bool) *RankEval { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Metric Definition of the evaluation metric to calculate. // API name: metric func (r *RankEval) Metric(metric *types.RankEvalMetric) *RankEval { diff --git a/typedapi/core/rankeval/request.go b/typedapi/core/rankeval/request.go index 5d0a014ef3..115fdec279 100644 --- a/typedapi/core/rankeval/request.go +++ b/typedapi/core/rankeval/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package rankeval @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package rankeval // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/RankEvalRequest.ts#L24-L61 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/RankEvalRequest.ts#L24-L61 type Request struct { // Metric Definition of the evaluation metric to calculate. @@ -41,6 +41,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/rankeval/response.go b/typedapi/core/rankeval/response.go index 08fd3c7740..2de93e93e0 100644 --- a/typedapi/core/rankeval/response.go +++ b/typedapi/core/rankeval/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package rankeval @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package rankeval // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/RankEvalResponse.ts#L26-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/RankEvalResponse.ts#L26-L34 type Response struct { // Details The details section contains one entry for every query in the original diff --git a/typedapi/core/reindex/reindex.go b/typedapi/core/reindex/reindex.go index 2cd86efc73..c413d20677 100644 --- a/typedapi/core/reindex/reindex.go +++ b/typedapi/core/reindex/reindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Allows to copy documents from one index to another, optionally filtering the // source @@ -371,6 +371,50 @@ func (r *Reindex) RequireAlias(requirealias bool) *Reindex { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Reindex) ErrorTrace(errortrace bool) *Reindex { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Reindex) FilterPath(filterpaths ...string) *Reindex { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Reindex) Human(human bool) *Reindex { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Reindex) Pretty(pretty bool) *Reindex { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Conflicts Set to proceed to continue reindexing even if there are conflicts. // API name: conflicts func (r *Reindex) Conflicts(conflicts conflicts.Conflicts) *Reindex { diff --git a/typedapi/core/reindex/request.go b/typedapi/core/reindex/request.go index cac3f0bdc2..a409ca2a86 100644 --- a/typedapi/core/reindex/request.go +++ b/typedapi/core/reindex/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
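// Illustrative sketch, not part of the generated code above: Reindex copies documents from a
// source index to a destination index. The body is sent as raw JSON to avoid depending on body
// type names that do not appear in this diff; the Raw helper, the es.Reindex accessor and the
// index names are assumptions, while the Pretty setter is part of this change.
package main

import (
	"context"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		panic(err)
	}

	// "conflicts": "proceed" keeps the reindex running past version conflicts,
	// matching the Conflicts body setter shown above.
	body := `{
	  "source": { "index": "old-index" },
	  "dest":   { "index": "new-index" },
	  "conflicts": "proceed"
	}`

	res, err := es.Reindex().
		Raw(strings.NewReader(body)).
		Pretty(true).
		Do(context.Background())
	if err != nil {
		panic(err)
	}
	_ = res
}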
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package reindex @@ -34,7 +34,7 @@ import ( // Request holds the request body struct for the package reindex // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/reindex/ReindexRequest.ts#L27-L101 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/reindex/ReindexRequest.ts#L27-L101 type Request struct { // Conflicts Set to proceed to continue reindexing even if there are conflicts. @@ -53,6 +53,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -93,7 +94,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "max_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -144,7 +145,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/core/reindex/response.go b/typedapi/core/reindex/response.go index 4a3db6a4d0..9ed5062f61 100644 --- a/typedapi/core/reindex/response.go +++ b/typedapi/core/reindex/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package reindex @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package reindex // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/reindex/ReindexResponse.ts#L26-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/reindex/ReindexResponse.ts#L26-L45 type Response struct { Batches *int64 `json:"batches,omitempty"` Created *int64 `json:"created,omitempty"` diff --git a/typedapi/core/reindexrethrottle/reindex_rethrottle.go b/typedapi/core/reindexrethrottle/reindex_rethrottle.go index 461f631eee..706214791e 100644 --- a/typedapi/core/reindexrethrottle/reindex_rethrottle.go +++ b/typedapi/core/reindexrethrottle/reindex_rethrottle.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Changes the number of requests per second for a particular Reindex operation. +// Copies documents from a source to a destination. package reindexrethrottle import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +76,7 @@ func NewReindexRethrottleFunc(tp elastictransport.Interface) NewReindexRethrottl } } -// Changes the number of requests per second for a particular Reindex operation. +// Copies documents from a source to a destination. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html func New(tp elastictransport.Interface) *ReindexRethrottle { @@ -262,7 +262,7 @@ func (r ReindexRethrottle) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -306,3 +306,47 @@ func (r *ReindexRethrottle) RequestsPerSecond(requestspersecond string) *Reindex return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ReindexRethrottle) ErrorTrace(errortrace bool) *ReindexRethrottle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ReindexRethrottle) FilterPath(filterpaths ...string) *ReindexRethrottle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ReindexRethrottle) Human(human bool) *ReindexRethrottle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ReindexRethrottle) Pretty(pretty bool) *ReindexRethrottle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/reindexrethrottle/response.go b/typedapi/core/reindexrethrottle/response.go index 65cd668e97..a3a9382031 100644 --- a/typedapi/core/reindexrethrottle/response.go +++ b/typedapi/core/reindexrethrottle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package reindexrethrottle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package reindexrethrottle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/reindex_rethrottle/ReindexRethrottleResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/reindex_rethrottle/ReindexRethrottleResponse.ts#L23-L25 type Response struct { Nodes map[string]types.ReindexNode `json:"nodes"` } diff --git a/typedapi/core/rendersearchtemplate/render_search_template.go b/typedapi/core/rendersearchtemplate/render_search_template.go index 974a50874a..6250167b84 100644 --- a/typedapi/core/rendersearchtemplate/render_search_template.go +++ b/typedapi/core/rendersearchtemplate/render_search_template.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
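// Illustrative sketch, not part of the generated code above: ReindexRethrottle adjusts the
// requests_per_second of a reindex task that is already running, identified by its task id.
// RequestsPerSecond and the Nodes response field are shown in this diff; the
// es.ReindexRethrottle accessor, the task-id value and the client wiring are assumptions.
package main

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		panic(err)
	}

	// "-1" removes the throttle entirely; any positive value sets a new rate.
	res, err := es.ReindexRethrottle("oTUltX4IQMOUUVeiohTt8A:12345").
		RequestsPerSecond("-1").
		Do(context.Background())
	if err != nil {
		panic(err)
	}
	_ = res.Nodes // per-node task details, as in the Response struct above
}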
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Allows to use the Mustache language to pre-render a search definition. +// Renders a search template as a search request body. package rendersearchtemplate import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -78,7 +79,7 @@ func NewRenderSearchTemplateFunc(tp elastictransport.Interface) NewRenderSearchT } } -// Allows to use the Mustache language to pre-render a search definition. +// Renders a search template as a search request body. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/render-search-template-api.html func New(tp elastictransport.Interface) *RenderSearchTemplate { @@ -321,6 +322,50 @@ func (r *RenderSearchTemplate) Id(id string) *RenderSearchTemplate { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *RenderSearchTemplate) ErrorTrace(errortrace bool) *RenderSearchTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *RenderSearchTemplate) FilterPath(filterpaths ...string) *RenderSearchTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *RenderSearchTemplate) Human(human bool) *RenderSearchTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *RenderSearchTemplate) Pretty(pretty bool) *RenderSearchTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: file func (r *RenderSearchTemplate) File(file string) *RenderSearchTemplate { diff --git a/typedapi/core/rendersearchtemplate/request.go b/typedapi/core/rendersearchtemplate/request.go index 4d42ea2c8a..0e9d97c550 100644 --- a/typedapi/core/rendersearchtemplate/request.go +++ b/typedapi/core/rendersearchtemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
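// Illustrative sketch, not part of the generated code above: RenderSearchTemplate renders a
// stored template (referenced here by id) into the search request body it would produce, with
// Params filling the Mustache variables. Id, the Params field and the TemplateOutput response
// field come from the generated code in this diff; the es.RenderSearchTemplate accessor, the
// Request setter and the client wiring are assumptions based on the typed-client conventions.
package main

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/core/rendersearchtemplate"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		panic(err)
	}

	res, err := es.RenderSearchTemplate().
		Id("my-search-template").
		Request(&rendersearchtemplate.Request{
			Params: map[string]json.RawMessage{
				"query_string": json.RawMessage(`"hello world"`),
			},
		}).
		Do(context.Background())
	if err != nil {
		panic(err)
	}
	fmt.Println(res.TemplateOutput) // the rendered search body, keyed by section
}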
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package rendersearchtemplate @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package rendersearchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/render_search_template/RenderSearchTemplateRequest.ts#L25-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/render_search_template/RenderSearchTemplateRequest.ts#L25-L55 type Request struct { File *string `json:"file,omitempty"` // Params Key-value pairs used to replace Mustache variables in the template. @@ -46,6 +46,7 @@ func NewRequest() *Request { r := &Request{ Params: make(map[string]json.RawMessage, 0), } + return r } diff --git a/typedapi/core/rendersearchtemplate/response.go b/typedapi/core/rendersearchtemplate/response.go index 9f523f3bca..3de5143e96 100644 --- a/typedapi/core/rendersearchtemplate/response.go +++ b/typedapi/core/rendersearchtemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package rendersearchtemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package rendersearchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/render_search_template/RenderSearchTemplateResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/render_search_template/RenderSearchTemplateResponse.ts#L23-L25 type Response struct { TemplateOutput map[string]json.RawMessage `json:"template_output"` } diff --git a/typedapi/core/scriptspainlessexecute/request.go b/typedapi/core/scriptspainlessexecute/request.go index 66ddd95835..cd5a4dfa75 100644 --- a/typedapi/core/scriptspainlessexecute/request.go +++ b/typedapi/core/scriptspainlessexecute/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package scriptspainlessexecute @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package scriptspainlessexecute // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/scripts_painless_execute/ExecutePainlessScriptRequest.ts#L24-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/scripts_painless_execute/ExecutePainlessScriptRequest.ts#L24-L46 type Request struct { // Context The context that the script should run in. 
@@ -43,6 +43,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/scriptspainlessexecute/response.go b/typedapi/core/scriptspainlessexecute/response.go index c69f5eb134..7b4be79d72 100644 --- a/typedapi/core/scriptspainlessexecute/response.go +++ b/typedapi/core/scriptspainlessexecute/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package scriptspainlessexecute @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package scriptspainlessexecute // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/scripts_painless_execute/ExecutePainlessScriptResponse.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/scripts_painless_execute/ExecutePainlessScriptResponse.ts#L20-L24 type Response struct { Result json.RawMessage `json:"result,omitempty"` } diff --git a/typedapi/core/scriptspainlessexecute/scripts_painless_execute.go b/typedapi/core/scriptspainlessexecute/scripts_painless_execute.go index bb838f6812..813c6e6886 100644 --- a/typedapi/core/scriptspainlessexecute/scripts_painless_execute.go +++ b/typedapi/core/scriptspainlessexecute/scripts_painless_execute.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Allows an arbitrary script to be executed and a result to be returned +// Runs a script and returns a result. package scriptspainlessexecute import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -72,7 +73,7 @@ func NewScriptsPainlessExecuteFunc(tp elastictransport.Interface) NewScriptsPain } } -// Allows an arbitrary script to be executed and a result to be returned +// Runs a script and returns a result. // // https://www.elastic.co/guide/en/elasticsearch/painless/current/painless-execute-api.html func New(tp elastictransport.Interface) *ScriptsPainlessExecute { @@ -293,6 +294,50 @@ func (r *ScriptsPainlessExecute) Header(key, value string) *ScriptsPainlessExecu return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ScriptsPainlessExecute) ErrorTrace(errortrace bool) *ScriptsPainlessExecute { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ScriptsPainlessExecute) FilterPath(filterpaths ...string) *ScriptsPainlessExecute { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ScriptsPainlessExecute) Human(human bool) *ScriptsPainlessExecute { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ScriptsPainlessExecute) Pretty(pretty bool) *ScriptsPainlessExecute { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Context The context that the script should run in. // API name: context func (r *ScriptsPainlessExecute) Context(context string) *ScriptsPainlessExecute { diff --git a/typedapi/core/scroll/request.go b/typedapi/core/scroll/request.go index be6d430a96..acdcd117fb 100644 --- a/typedapi/core/scroll/request.go +++ b/typedapi/core/scroll/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package scroll @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package scroll // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/scroll/ScrollRequest.ts#L24-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/scroll/ScrollRequest.ts#L24-L59 type Request struct { // Scroll Period to retain the search context for scrolling. @@ -44,6 +44,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/core/scroll/response.go b/typedapi/core/scroll/response.go index 1fb9d7b952..34f66cdc3a 100644 --- a/typedapi/core/scroll/response.go +++ b/typedapi/core/scroll/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
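// Illustrative sketch, not part of the generated code above: ScriptsPainlessExecute runs a
// Painless script and returns its result; with no Context set, the default painless_test
// context is used, so no index setup is required. The body is sent as raw JSON to avoid
// relying on script body types outside this diff; Raw, the es.ScriptsPainlessExecute accessor
// and the client wiring are assumptions, while the Result response field comes from this diff.
package main

import (
	"context"
	"fmt"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		panic(err)
	}

	body := `{"script":{"source":"params.count / params.total","params":{"count":100.0,"total":1000.0}}}`

	res, err := es.ScriptsPainlessExecute().
		Raw(strings.NewReader(body)).
		Do(context.Background())
	if err != nil {
		panic(err)
	}
	fmt.Println(string(res.Result)) // raw JSON result of the script
}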
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package scroll @@ -34,7 +34,7 @@ import ( // Response holds the response body struct for the package scroll // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/scroll/ScrollResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/scroll/ScrollResponse.ts#L22-L24 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` Clusters_ *types.ClusterStatistics `json:"_clusters,omitempty"` @@ -532,7 +532,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := types.NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -582,7 +582,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -592,7 +592,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -620,7 +620,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "max_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -636,7 +636,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "num_reduce_phases": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -714,7 +714,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -724,7 +724,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Suggest") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -734,7 +734,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "terminated_early": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -748,7 +748,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "timed_out": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -762,7 +762,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/core/scroll/scroll.go b/typedapi/core/scroll/scroll.go index abbe82495d..f8c6bf1fa7 100644 --- a/typedapi/core/scroll/scroll.go +++ b/typedapi/core/scroll/scroll.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Allows to retrieve a large numbers of results from a single search request. package scroll @@ -303,6 +303,50 @@ func (r *Scroll) RestTotalHitsAsInt(resttotalhitsasint bool) *Scroll { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Scroll) ErrorTrace(errortrace bool) *Scroll { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Scroll) FilterPath(filterpaths ...string) *Scroll { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Scroll) Human(human bool) *Scroll { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Scroll) Pretty(pretty bool) *Scroll { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Scroll Period to retain the search context for scrolling. // API name: scroll func (r *Scroll) Scroll(duration types.Duration) *Scroll { diff --git a/typedapi/core/search/request.go b/typedapi/core/search/request.go index 99e4b3dd90..e4212fa395 100644 --- a/typedapi/core/search/request.go +++ b/typedapi/core/search/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package search @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/SearchRequest.ts#L53-L506 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/SearchRequest.ts#L54-L520 type Request struct { // Aggregations Defines the aggregations that are run as part of the search request. @@ -65,7 +65,7 @@ type Request struct { // IndicesBoost Boosts the _score of documents from specified indices. IndicesBoost []map[string]types.Float64 `json:"indices_boost,omitempty"` // Knn Defines the approximate kNN search to run. - Knn []types.KnnQuery `json:"knn,omitempty"` + Knn []types.KnnSearch `json:"knn,omitempty"` // MinScore Minimum `_score` for matching documents. // Documents with a lower `_score` are not included in the search results. 
MinScore *types.Float64 `json:"min_score,omitempty"` @@ -88,6 +88,10 @@ type Request struct { // Rescore Can be used to improve precision by reordering just the top (for example 100 // - 500) documents returned by the `query` and `post_filter` phases. Rescore []types.Rescore `json:"rescore,omitempty"` + // Retriever A retriever is a specification to describe top documents returned from a + // search. A retriever replaces other elements of the search API that also + // return top documents such as query and knn. + Retriever *types.RetrieverContainer `json:"retriever,omitempty"` // RuntimeMappings Defines one or more runtime fields in the search request. // These fields take precedence over mapped fields with the same name. RuntimeMappings types.RuntimeFields `json:"runtime_mappings,omitempty"` @@ -161,6 +165,7 @@ func NewRequest() *Request { Ext: make(map[string]json.RawMessage, 0), ScriptFields: make(map[string]types.ScriptField, 0), } + return r } @@ -209,7 +214,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "explain": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -237,7 +242,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "from": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -265,7 +270,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { rawMsg := json.RawMessage{} dec.Decode(&rawMsg) if !bytes.HasPrefix(rawMsg, []byte("[")) { - o := types.NewKnnQuery() + o := types.NewKnnSearch() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Knn", err) } @@ -278,7 +283,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "min_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -304,7 +309,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "profile": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -343,6 +348,11 @@ func (s *Request) UnmarshalJSON(data []byte) error { } } + case "retriever": + if err := dec.Decode(&s.Retriever); err != nil { + return fmt.Errorf("%s | %w", "Retriever", err) + } + case "runtime_mappings": if err := dec.Decode(&s.RuntimeMappings); err != nil { return fmt.Errorf("%s | %w", "RuntimeMappings", err) @@ -362,7 +372,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "seq_no_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -377,7 +387,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -444,7 +454,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "terminate_after": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -471,7 +481,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Timeout = &o case "track_scores": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -490,7 +500,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "version": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/core/search/response.go b/typedapi/core/search/response.go index 38f36e61ef..de29f1aae7 100644 --- a/typedapi/core/search/response.go +++ b/typedapi/core/search/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package search @@ -34,7 +34,7 @@ import ( // Response holds the response body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/SearchResponse.ts#L34-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/SearchResponse.ts#L34-L36 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` Clusters_ *types.ClusterStatistics `json:"_clusters,omitempty"` @@ -532,7 +532,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := types.NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -582,7 +582,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -592,7 +592,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -620,7 +620,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "max_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -636,7 +636,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "num_reduce_phases": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -714,7 +714,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -724,7 +724,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Suggest") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -734,7 +734,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "terminated_early": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -748,7 +748,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "timed_out": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -762,7 +762,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/core/search/search.go b/typedapi/core/search/search.go index b2d1e945a4..571dddaf4f 100644 --- a/typedapi/core/search/search.go +++ b/typedapi/core/search/search.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns results matching a query. +// Returns search hits that match the query defined in the request. +// You can provide search queries using the `q` query string parameter or the +// request body. +// If both are specified, only the query parameter is used. package search import ( @@ -83,7 +86,10 @@ func NewSearchFunc(tp elastictransport.Interface) NewSearch { } } -// Returns results matching a query. +// Returns search hits that match the query defined in the request. +// You can provide search queries using the `q` query string parameter or the +// request body. +// If both are specified, only the query parameter is used. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-search.html func New(tp elastictransport.Interface) *Search { @@ -639,6 +645,62 @@ func (r *Search) Q(q string) *Search { return r } +// ForceSyntheticSource Should this request force synthetic _source? +// Use this to test if the mapping supports synthetic _source and to get a sense +// of the worst case performance. +// Fetches with this enabled will be slower the enabling synthetic source +// natively in the index. +// API name: force_synthetic_source +func (r *Search) ForceSyntheticSource(forcesyntheticsource bool) *Search { + r.values.Set("force_synthetic_source", strconv.FormatBool(forcesyntheticsource)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Search) ErrorTrace(errortrace bool) *Search { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Search) FilterPath(filterpaths ...string) *Search { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Search) Human(human bool) *Search { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Search) Pretty(pretty bool) *Search { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Aggregations Defines the aggregations that are run as part of the search request. // API name: aggregations func (r *Search) Aggregations(aggregations map[string]types.Aggregations) *Search { @@ -727,7 +789,7 @@ func (r *Search) IndicesBoost(indicesboosts ...map[string]types.Float64) *Search // Knn Defines the approximate kNN search to run. 
// API name: knn -func (r *Search) Knn(knns ...types.KnnQuery) *Search { +func (r *Search) Knn(knns ...types.KnnSearch) *Search { r.req.Knn = knns return r @@ -802,6 +864,17 @@ func (r *Search) Rescore(rescores ...types.Rescore) *Search { return r } +// Retriever A retriever is a specification to describe top documents returned from a +// search. A retriever replaces other elements of the search API that also +// return top documents such as query and knn. +// API name: retriever +func (r *Search) Retriever(retriever *types.RetrieverContainer) *Search { + + r.req.Retriever = retriever + + return r +} + // RuntimeMappings Defines one or more runtime fields in the search request. // These fields take precedence over mapped fields with the same name. // API name: runtime_mappings diff --git a/typedapi/core/searchmvt/request.go b/typedapi/core/searchmvt/request.go index c53fb11d1a..87317385da 100644 --- a/typedapi/core/searchmvt/request.go +++ b/typedapi/core/searchmvt/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package searchmvt @@ -35,7 +35,7 @@ import ( // Request holds the request body struct for the package searchmvt // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search_mvt/SearchMvtRequest.ts#L33-L188 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search_mvt/SearchMvtRequest.ts#L33-L188 type Request struct { // Aggs Sub-aggregations for the geotile_grid. @@ -109,6 +109,7 @@ func NewRequest() *Request { r := &Request{ Aggs: make(map[string]types.Aggregations, 0), } + return r } @@ -148,7 +149,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "buffer": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -163,7 +164,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "exact_bounds": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -178,7 +179,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "extent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -215,7 +216,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "grid_precision": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -246,7 +247,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -282,7 +283,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "with_labels": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/core/searchmvt/response.go b/typedapi/core/searchmvt/response.go index cd6f646bbc..f62f1f13d2 100644 --- a/typedapi/core/searchmvt/response.go +++ b/typedapi/core/searchmvt/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
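Annotation: a minimal sketch of the two body-level changes visible in this hunk: Knn() now takes types.KnnSearch values instead of types.KnnQuery, and the new Retriever() option accepts a *types.RetrieverContainer. The builder methods come from the generated code above; the inner KnnSearch and StandardRetriever field names are assumptions based on the elasticsearch-specification, and since a retriever replaces query/knn the two calls are shown separately.

package example

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/core/search"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// knnSearch uses the renamed types.KnnSearch body element (minimal fields only).
func knnSearch(ctx context.Context, es *elasticsearch.TypedClient) (*search.Response, error) {
	return es.Search().
		Index("my-index").
		Knn(types.KnnSearch{ // field names assumed from the specification
			Field:       "embedding",
			QueryVector: []float32{0.1, 0.2, 0.3},
		}).
		Do(ctx)
}

// retrieverSearch uses the new retriever body element, which replaces
// query/knn as the way to describe top documents.
func retrieverSearch(ctx context.Context, es *elasticsearch.TypedClient) (*search.Response, error) {
	return es.Search().
		Index("my-index").
		Retriever(&types.RetrieverContainer{
			Standard: &types.StandardRetriever{ // field names assumed from the specification
				Query: &types.Query{
					Match: map[string]types.MatchQuery{
						"title": {Query: "golang client"},
					},
				},
			},
		}).
		Do(ctx)
}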
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package searchmvt // Response holds the response body struct for the package searchmvt // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search_mvt/SearchMvtResponse.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search_mvt/SearchMvtResponse.ts#L22-L25 type Response = []byte diff --git a/typedapi/core/searchmvt/search_mvt.go b/typedapi/core/searchmvt/search_mvt.go index 6c2a79bfb5..392ad419e8 100644 --- a/typedapi/core/searchmvt/search_mvt.go +++ b/typedapi/core/searchmvt/search_mvt.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Searches a vector tile for geospatial values. Returns results as a binary // Mapbox vector tile. @@ -31,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -396,6 +397,50 @@ func (r *SearchMvt) _y(y string) *SearchMvt { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *SearchMvt) ErrorTrace(errortrace bool) *SearchMvt { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SearchMvt) FilterPath(filterpaths ...string) *SearchMvt { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SearchMvt) Human(human bool) *SearchMvt { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *SearchMvt) Pretty(pretty bool) *SearchMvt { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Aggs Sub-aggregations for the geotile_grid. // // Supports the following aggregation types: diff --git a/typedapi/core/searchshards/response.go b/typedapi/core/searchshards/response.go index 23d1948da0..e94b52db15 100644 --- a/typedapi/core/searchshards/response.go +++ b/typedapi/core/searchshards/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
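Annotation: the ErrorTrace, FilterPath, Human and Pretty helpers added throughout this diff all set the corresponding common query parameters on the request URL. A small sketch of switching them on for a query-string search, using only builder methods that appear in the generated code above; the index name and query are assumptions.

package example

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/core/search"
)

// verboseSearch enables the common debugging parameters, producing a URL like
// ?pretty=true&human=true&error_trace=true&filter_path=took,hits.hits._id
func verboseSearch(ctx context.Context, es *elasticsearch.TypedClient) (*search.Response, error) {
	return es.Search().
		Index("my-index").
		Q("user.id:kimchy").
		Pretty(true).
		Human(true).
		ErrorTrace(true).
		FilterPath("took", "hits.hits._id").
		Do(ctx)
}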
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package searchshards @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package searchshards // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search_shards/SearchShardsResponse.ts#L25-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search_shards/SearchShardsResponse.ts#L25-L31 type Response struct { Indices map[string]types.ShardStoreIndex `json:"indices"` Nodes map[string]types.NodeAttributes `json:"nodes"` diff --git a/typedapi/core/searchshards/search_shards.go b/typedapi/core/searchshards/search_shards.go index cf84e137d5..acda27a456 100644 --- a/typedapi/core/searchshards/search_shards.go +++ b/typedapi/core/searchshards/search_shards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns information about the indices and shards that a search request would // be executed against. @@ -28,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -267,7 +266,7 @@ func (r SearchShards) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -366,3 +365,47 @@ func (r *SearchShards) Routing(routing string) *SearchShards { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *SearchShards) ErrorTrace(errortrace bool) *SearchShards { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SearchShards) FilterPath(filterpaths ...string) *SearchShards { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SearchShards) Human(human bool) *SearchShards { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *SearchShards) Pretty(pretty bool) *SearchShards { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/core/searchtemplate/request.go b/typedapi/core/searchtemplate/request.go index ec20ae20f8..008363178c 100644 --- a/typedapi/core/searchtemplate/request.go +++ b/typedapi/core/searchtemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package searchtemplate @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package searchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search_template/SearchTemplateRequest.ts#L32-L134 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search_template/SearchTemplateRequest.ts#L32-L134 type Request struct { // Explain If `true`, returns detailed information about score calculation as part of @@ -57,6 +57,7 @@ func NewRequest() *Request { r := &Request{ Params: make(map[string]json.RawMessage, 0), } + return r } @@ -87,7 +88,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "explain": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +115,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "profile": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/core/searchtemplate/response.go b/typedapi/core/searchtemplate/response.go index f13eac6070..d70ca0307f 100644 --- a/typedapi/core/searchtemplate/response.go +++ b/typedapi/core/searchtemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
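Annotation: several files in this diff drop the deprecated io/ioutil import; since Go 1.16, io.Discard is the supported replacement for ioutil.Discard and behaves identically when draining a response body. A standalone sketch of the pattern used by IsSuccess above.

package example

import (
	"io"
	"net/http"
)

// drainAndClose discards any remaining body bytes so the underlying
// connection can be reused, then closes the body.
func drainAndClose(res *http.Response) error {
	// Before: io.Copy(ioutil.Discard, res.Body)
	if _, err := io.Copy(io.Discard, res.Body); err != nil {
		return err
	}
	return res.Body.Close()
}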
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package searchtemplate @@ -34,7 +34,7 @@ import ( // Response holds the response body struct for the package searchtemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search_template/SearchTemplateResponse.ts#L30-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search_template/SearchTemplateResponse.ts#L30-L48 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` Clusters_ *types.ClusterStatistics `json:"_clusters,omitempty"` @@ -532,7 +532,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := types.NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -582,7 +582,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -592,7 +592,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -620,7 +620,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "max_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -636,7 +636,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "num_reduce_phases": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -714,7 +714,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -724,7 +724,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Suggest") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -734,7 +734,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "terminated_early": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -748,7 +748,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "timed_out": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -762,7 +762,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/core/searchtemplate/search_template.go b/typedapi/core/searchtemplate/search_template.go index 1534968977..79ba0961c0 100644 --- a/typedapi/core/searchtemplate/search_template.go +++ b/typedapi/core/searchtemplate/search_template.go @@ -16,9 +16,9 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Allows to use the Mustache language to pre-render a search definition. +// Runs a search with a search template. package searchtemplate import ( @@ -81,7 +81,7 @@ func NewSearchTemplateFunc(tp elastictransport.Interface) NewSearchTemplate { } } -// Allows to use the Mustache language to pre-render a search definition. +// Runs a search with a search template. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-template.html func New(tp elastictransport.Interface) *SearchTemplate { @@ -431,6 +431,50 @@ func (r *SearchTemplate) TypedKeys(typedkeys bool) *SearchTemplate { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *SearchTemplate) ErrorTrace(errortrace bool) *SearchTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SearchTemplate) FilterPath(filterpaths ...string) *SearchTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SearchTemplate) Human(human bool) *SearchTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *SearchTemplate) Pretty(pretty bool) *SearchTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Explain If `true`, returns detailed information about score calculation as part of // each hit. // API name: explain diff --git a/typedapi/core/termsenum/request.go b/typedapi/core/termsenum/request.go index e2bbe7146c..66b58877be 100644 --- a/typedapi/core/termsenum/request.go +++ b/typedapi/core/termsenum/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
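Annotation: the repeated `interface{}` to `any` changes are purely cosmetic: `any` has been a built-in alias for `interface{}` since Go 1.18, so the decoders keep exactly the same behaviour. For illustration:

package example

// kindOf behaves identically whether the parameter is declared as
// `any` or as `interface{}`; the two spellings are interchangeable.
func kindOf(v any) string {
	switch v.(type) {
	case string:
		return "string"
	case float64:
		return "number"
	case bool:
		return "bool"
	default:
		return "other"
	}
}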
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package termsenum @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package termsenum // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/terms_enum/TermsEnumRequest.ts#L26-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/terms_enum/TermsEnumRequest.ts#L26-L65 type Request struct { // CaseInsensitive When true the provided search string is matched against index terms without @@ -60,6 +60,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -90,7 +91,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "case_insensitive": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -127,7 +128,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/core/termsenum/response.go b/typedapi/core/termsenum/response.go index 588e0f8fa5..93ac4cef3e 100644 --- a/typedapi/core/termsenum/response.go +++ b/typedapi/core/termsenum/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package termsenum @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package termsenum // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/terms_enum/TermsEnumResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/terms_enum/TermsEnumResponse.ts#L22-L28 type Response struct { Complete bool `json:"complete"` Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/core/termsenum/terms_enum.go b/typedapi/core/termsenum/terms_enum.go index 5e0a2fac03..4648f4fc1d 100644 --- a/typedapi/core/termsenum/terms_enum.go +++ b/typedapi/core/termsenum/terms_enum.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // The terms enum API can be used to discover terms in the index that begin // with the provided string. It is designed for low-latency look-ups used in @@ -32,6 +32,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -317,6 +318,50 @@ func (r *TermsEnum) _index(index string) *TermsEnum { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *TermsEnum) ErrorTrace(errortrace bool) *TermsEnum { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *TermsEnum) FilterPath(filterpaths ...string) *TermsEnum { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *TermsEnum) Human(human bool) *TermsEnum { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *TermsEnum) Pretty(pretty bool) *TermsEnum { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // CaseInsensitive When true the provided search string is matched against index terms without // case sensitivity. // API name: case_insensitive diff --git a/typedapi/core/termvectors/request.go b/typedapi/core/termvectors/request.go index 0d9b977593..4687d17275 100644 --- a/typedapi/core/termvectors/request.go +++ b/typedapi/core/termvectors/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package termvectors @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package termvectors // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/termvectors/TermVectorsRequest.ts#L33-L118 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/termvectors/TermVectorsRequest.ts#L33-L118 type Request struct { // Doc An artificial document (a document not present in the index) for which you @@ -46,6 +46,7 @@ func NewRequest() *Request { r := &Request{ PerFieldAnalyzer: make(map[string]string, 0), } + return r } diff --git a/typedapi/core/termvectors/response.go b/typedapi/core/termvectors/response.go index 2e42e1a69c..0540135149 100644 --- a/typedapi/core/termvectors/response.go +++ b/typedapi/core/termvectors/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package termvectors @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package termvectors // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/termvectors/TermVectorsResponse.ts#L25-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/termvectors/TermVectorsResponse.ts#L25-L34 type Response struct { Found bool `json:"found"` Id_ string `json:"_id"` diff --git a/typedapi/core/termvectors/termvectors.go b/typedapi/core/termvectors/termvectors.go index 494c22f771..52ded1e91a 100644 --- a/typedapi/core/termvectors/termvectors.go +++ b/typedapi/core/termvectors/termvectors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns information and statistics about terms in the fields of a particular // document. @@ -438,6 +438,50 @@ func (r *Termvectors) VersionType(versiontype versiontype.VersionType) *Termvect return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Termvectors) ErrorTrace(errortrace bool) *Termvectors { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Termvectors) FilterPath(filterpaths ...string) *Termvectors { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Termvectors) Human(human bool) *Termvectors { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Termvectors) Pretty(pretty bool) *Termvectors { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Doc An artificial document (a document not present in the index) for which you // want to retrieve term vectors. // API name: doc @@ -445,7 +489,7 @@ func (r *Termvectors) VersionType(versiontype versiontype.VersionType) *Termvect // doc should be a json.RawMessage or a structure // if a structure is provided, the client will defer a json serialization // prior to sending the payload to Elasticsearch. 
-func (r *Termvectors) Doc(doc interface{}) *Termvectors { +func (r *Termvectors) Doc(doc any) *Termvectors { switch casted := doc.(type) { case json.RawMessage: r.req.Doc = casted diff --git a/typedapi/core/update/request.go b/typedapi/core/update/request.go index 954d95cafe..791ce14fc4 100644 --- a/typedapi/core/update/request.go +++ b/typedapi/core/update/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package update @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package update // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/update/UpdateRequest.ts#L38-L151 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/update/UpdateRequest.ts#L38-L151 type Request struct { // DetectNoop Set to false to disable setting 'result' in the response @@ -60,6 +60,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -90,7 +91,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "detect_noop": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -109,7 +110,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "doc_as_upsert": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -159,7 +160,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "scripted_upsert": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/core/update/response.go b/typedapi/core/update/response.go index b64a23e4ed..113bd0d2bd 100644 --- a/typedapi/core/update/response.go +++ b/typedapi/core/update/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package update @@ -27,15 +27,15 @@ import ( // Response holds the response body struct for the package update // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/update/UpdateResponse.ts#L27-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/update/UpdateResponse.ts#L27-L29 type Response struct { ForcedRefresh *bool `json:"forced_refresh,omitempty"` Get *types.InlineGet `json:"get,omitempty"` Id_ string `json:"_id"` Index_ string `json:"_index"` - PrimaryTerm_ int64 `json:"_primary_term"` + PrimaryTerm_ *int64 `json:"_primary_term,omitempty"` Result result.Result `json:"result"` - SeqNo_ int64 `json:"_seq_no"` + SeqNo_ *int64 `json:"_seq_no,omitempty"` Shards_ types.ShardStatistics `json:"_shards"` Version_ int64 `json:"_version"` } diff --git a/typedapi/core/update/update.go b/typedapi/core/update/update.go index de0f62a075..ceaefe5b7f 100644 --- a/typedapi/core/update/update.go +++ b/typedapi/core/update/update.go @@ -16,7 +16,7 @@ // under the License. 
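Annotation: in the update Response above, SeqNo_ and PrimaryTerm_ become *int64 and are omitted when Elasticsearch does not include them, so callers now need a nil check. A small sketch; the helper name is illustrative.

package example

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/core/update"
)

// describeUpdate guards the now-optional sequence number and primary term.
func describeUpdate(res *update.Response) string {
	if res.SeqNo_ == nil || res.PrimaryTerm_ == nil {
		return fmt.Sprintf("result=%v version=%d", res.Result, res.Version_)
	}
	return fmt.Sprintf("result=%v version=%d seq_no=%d primary_term=%d",
		res.Result, res.Version_, *res.SeqNo_, *res.PrimaryTerm_)
}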
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Updates a document with a script or partial document. package update @@ -432,6 +432,50 @@ func (r *Update) SourceIncludes_(fields ...string) *Update { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Update) ErrorTrace(errortrace bool) *Update { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Update) FilterPath(filterpaths ...string) *Update { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Update) Human(human bool) *Update { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Update) Pretty(pretty bool) *Update { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // DetectNoop Set to false to disable setting 'result' in the response // to 'noop' if no change to the document occurred. // API name: detect_noop @@ -447,7 +491,7 @@ func (r *Update) DetectNoop(detectnoop bool) *Update { // doc should be a json.RawMessage or a structure // if a structure is provided, the client will defer a json serialization // prior to sending the payload to Elasticsearch. -func (r *Update) Doc(doc interface{}) *Update { +func (r *Update) Doc(doc any) *Update { switch casted := doc.(type) { case json.RawMessage: r.req.Doc = casted @@ -507,7 +551,7 @@ func (r *Update) Source_(sourceconfig types.SourceConfig) *Update { // upsert should be a json.RawMessage or a structure // if a structure is provided, the client will defer a json serialization // prior to sending the payload to Elasticsearch. -func (r *Update) Upsert(upsert interface{}) *Update { +func (r *Update) Upsert(upsert any) *Update { switch casted := upsert.(type) { case json.RawMessage: r.req.Upsert = casted diff --git a/typedapi/core/updatebyquery/request.go b/typedapi/core/updatebyquery/request.go index 5518c190f1..39129c8af1 100644 --- a/typedapi/core/updatebyquery/request.go +++ b/typedapi/core/updatebyquery/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
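Annotation: Doc() and Upsert() now take `any`; passing a struct or map defers JSON serialization to the client, while a json.RawMessage is passed through untouched, as the comments above describe. A sketch of a doc-plus-upsert update; the index name, document ID and document shape are assumptions.

package example

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/core/update"
)

type product struct {
	Name  string  `json:"name"`
	Price float64 `json:"price"`
}

// upsertProduct sends a partial document and an upsert body; both are plain
// Go values that the client serializes before sending the request.
func upsertProduct(ctx context.Context, es *elasticsearch.TypedClient) (*update.Response, error) {
	doc := product{Name: "wrench", Price: 9.90}
	return es.Update("my-index", "product-1").
		Doc(doc).
		Upsert(doc).
		DetectNoop(true).
		Do(ctx)
}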
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatebyquery @@ -34,7 +34,7 @@ import ( // Request holds the request body struct for the package updatebyquery // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/update_by_query/UpdateByQueryRequest.ts#L37-L221 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/update_by_query/UpdateByQueryRequest.ts#L37-L221 type Request struct { // Conflicts What to do if update by query hits version conflicts: `abort` or `proceed`. @@ -53,6 +53,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -88,7 +89,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "max_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/core/updatebyquery/response.go b/typedapi/core/updatebyquery/response.go index 88e38143fa..ff21065b96 100644 --- a/typedapi/core/updatebyquery/response.go +++ b/typedapi/core/updatebyquery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatebyquery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatebyquery // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/update_by_query/UpdateByQueryResponse.ts#L26-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/update_by_query/UpdateByQueryResponse.ts#L26-L45 type Response struct { Batches *int64 `json:"batches,omitempty"` Deleted *int64 `json:"deleted,omitempty"` diff --git a/typedapi/core/updatebyquery/update_by_query.go b/typedapi/core/updatebyquery/update_by_query.go index d7dc8aa983..81f0e9f81d 100644 --- a/typedapi/core/updatebyquery/update_by_query.go +++ b/typedapi/core/updatebyquery/update_by_query.go @@ -16,14 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Updates documents that match the specified query. If no query is specified, -// -// performs an update on every document in the index without changing the -// -// source, -// for example to pick up a mapping change. +// Updates documents that match the specified query. +// If no query is specified, performs an update on every document in the data +// stream or index without modifying the source, which is useful for picking up +// mapping changes. package updatebyquery import ( @@ -90,12 +88,10 @@ func NewUpdateByQueryFunc(tp elastictransport.Interface) NewUpdateByQuery { } } -// Updates documents that match the specified query. 
If no query is specified, -// -// performs an update on every document in the index without changing the -// -// source, -// for example to pick up a mapping change. +// Updates documents that match the specified query. +// If no query is specified, performs an update on every document in the data +// stream or index without modifying the source, which is useful for picking up +// mapping changes. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-update-by-query.html func New(tp elastictransport.Interface) *UpdateByQuery { @@ -595,6 +591,50 @@ func (r *UpdateByQuery) WaitForCompletion(waitforcompletion bool) *UpdateByQuery return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpdateByQuery) ErrorTrace(errortrace bool) *UpdateByQuery { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *UpdateByQuery) FilterPath(filterpaths ...string) *UpdateByQuery { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *UpdateByQuery) Human(human bool) *UpdateByQuery { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpdateByQuery) Pretty(pretty bool) *UpdateByQuery { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Conflicts What to do if update by query hits version conflicts: `abort` or `proceed`. // API name: conflicts func (r *UpdateByQuery) Conflicts(conflicts conflicts.Conflicts) *UpdateByQuery { diff --git a/typedapi/core/updatebyqueryrethrottle/response.go b/typedapi/core/updatebyqueryrethrottle/response.go index db968c0082..5be1ad1225 100644 --- a/typedapi/core/updatebyqueryrethrottle/response.go +++ b/typedapi/core/updatebyqueryrethrottle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
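Annotation: the reworded update-by-query description above pairs with the existing Conflicts option; a minimal sketch of updating the documents matching a query in place while proceeding past version conflicts. The fluent Query setter and the conflicts.Proceed constant are assumed from the generated API's usual layout rather than shown in this hunk.

package example

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/core/updatebyquery"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/conflicts"
)

// reindexMatching updates matching documents in place (for example to pick up
// a mapping change) and does not abort on version conflicts.
func reindexMatching(ctx context.Context, es *elasticsearch.TypedClient) (*updatebyquery.Response, error) {
	return es.UpdateByQuery("my-index").
		Conflicts(conflicts.Proceed).
		Query(&types.Query{ // assumed fluent setter for the request body's query
			Match: map[string]types.MatchQuery{
				"status": {Query: "archived"},
			},
		}).
		Do(ctx)
}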
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatebyqueryrethrottle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatebyqueryrethrottle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/update_by_query_rethrottle/UpdateByQueryRethrottleResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/update_by_query_rethrottle/UpdateByQueryRethrottleResponse.ts#L23-L25 type Response struct { Nodes map[string]types.UpdateByQueryRethrottleNode `json:"nodes"` } diff --git a/typedapi/core/updatebyqueryrethrottle/update_by_query_rethrottle.go b/typedapi/core/updatebyqueryrethrottle/update_by_query_rethrottle.go index 835bfba6bd..cd06326f58 100644 --- a/typedapi/core/updatebyqueryrethrottle/update_by_query_rethrottle.go +++ b/typedapi/core/updatebyqueryrethrottle/update_by_query_rethrottle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Changes the number of requests per second for a particular Update By Query // operation. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r UpdateByQueryRethrottle) IsSuccess(providedCtx context.Context) (bool, e if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -308,3 +308,47 @@ func (r *UpdateByQueryRethrottle) RequestsPerSecond(requestspersecond string) *U return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpdateByQueryRethrottle) ErrorTrace(errortrace bool) *UpdateByQueryRethrottle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *UpdateByQueryRethrottle) FilterPath(filterpaths ...string) *UpdateByQueryRethrottle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *UpdateByQueryRethrottle) Human(human bool) *UpdateByQueryRethrottle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *UpdateByQueryRethrottle) Pretty(pretty bool) *UpdateByQueryRethrottle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/danglingindices/deletedanglingindex/delete_dangling_index.go b/typedapi/danglingindices/deletedanglingindex/delete_dangling_index.go index e9032dbb77..251b72754f 100644 --- a/typedapi/danglingindices/deletedanglingindex/delete_dangling_index.go +++ b/typedapi/danglingindices/deletedanglingindex/delete_dangling_index.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes the specified dangling index package deletedanglingindex @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -261,7 +260,7 @@ func (r DeleteDanglingIndex) IsSuccess(providedCtx context.Context) (bool, error if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -321,3 +320,47 @@ func (r *DeleteDanglingIndex) Timeout(duration string) *DeleteDanglingIndex { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteDanglingIndex) ErrorTrace(errortrace bool) *DeleteDanglingIndex { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteDanglingIndex) FilterPath(filterpaths ...string) *DeleteDanglingIndex { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteDanglingIndex) Human(human bool) *DeleteDanglingIndex { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteDanglingIndex) Pretty(pretty bool) *DeleteDanglingIndex { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/danglingindices/deletedanglingindex/response.go b/typedapi/danglingindices/deletedanglingindex/response.go index 75b58c32b8..d8d9bc9855 100644 --- a/typedapi/danglingindices/deletedanglingindex/response.go +++ b/typedapi/danglingindices/deletedanglingindex/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletedanglingindex // Response holds the response body struct for the package deletedanglingindex // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/dangling_indices/delete_dangling_index/DeleteDanglingIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/dangling_indices/delete_dangling_index/DeleteDanglingIndexResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/danglingindices/importdanglingindex/import_dangling_index.go b/typedapi/danglingindices/importdanglingindex/import_dangling_index.go index 02ce1c1076..e561221915 100644 --- a/typedapi/danglingindices/importdanglingindex/import_dangling_index.go +++ b/typedapi/danglingindices/importdanglingindex/import_dangling_index.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Imports the specified dangling index package importdanglingindex @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -261,7 +260,7 @@ func (r ImportDanglingIndex) IsSuccess(providedCtx context.Context) (bool, error if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -321,3 +320,47 @@ func (r *ImportDanglingIndex) Timeout(duration string) *ImportDanglingIndex { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ImportDanglingIndex) ErrorTrace(errortrace bool) *ImportDanglingIndex { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ImportDanglingIndex) FilterPath(filterpaths ...string) *ImportDanglingIndex { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ImportDanglingIndex) Human(human bool) *ImportDanglingIndex { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *ImportDanglingIndex) Pretty(pretty bool) *ImportDanglingIndex { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/danglingindices/importdanglingindex/response.go b/typedapi/danglingindices/importdanglingindex/response.go index 92623c31fe..6b549efbed 100644 --- a/typedapi/danglingindices/importdanglingindex/response.go +++ b/typedapi/danglingindices/importdanglingindex/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package importdanglingindex // Response holds the response body struct for the package importdanglingindex // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/dangling_indices/import_dangling_index/ImportDanglingIndexResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/dangling_indices/import_dangling_index/ImportDanglingIndexResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/danglingindices/listdanglingindices/list_dangling_indices.go b/typedapi/danglingindices/listdanglingindices/list_dangling_indices.go index f50a638220..105b969f7b 100644 --- a/typedapi/danglingindices/listdanglingindices/list_dangling_indices.go +++ b/typedapi/danglingindices/listdanglingindices/list_dangling_indices.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns all dangling indices. package listdanglingindices @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -246,7 +246,7 @@ func (r ListDanglingIndices) IsSuccess(providedCtx context.Context) (bool, error if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -273,3 +273,47 @@ func (r *ListDanglingIndices) Header(key, value string) *ListDanglingIndices { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ListDanglingIndices) ErrorTrace(errortrace bool) *ListDanglingIndices { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ListDanglingIndices) FilterPath(filterpaths ...string) *ListDanglingIndices { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. 
When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ListDanglingIndices) Human(human bool) *ListDanglingIndices { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ListDanglingIndices) Pretty(pretty bool) *ListDanglingIndices { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/danglingindices/listdanglingindices/response.go b/typedapi/danglingindices/listdanglingindices/response.go index da9be0835c..3f630d0535 100644 --- a/typedapi/danglingindices/listdanglingindices/response.go +++ b/typedapi/danglingindices/listdanglingindices/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package listdanglingindices @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package listdanglingindices // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/dangling_indices/list_dangling_indices/ListDanglingIndicesResponse.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/dangling_indices/list_dangling_indices/ListDanglingIndicesResponse.ts#L23-L27 type Response struct { DanglingIndices []types.DanglingIndex `json:"dangling_indices"` } diff --git a/typedapi/enrich/deletepolicy/delete_policy.go b/typedapi/enrich/deletepolicy/delete_policy.go index 3727c1f34a..4d28069fa6 100644 --- a/typedapi/enrich/deletepolicy/delete_policy.go +++ b/typedapi/enrich/deletepolicy/delete_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes an existing enrich policy and its enrich index. package deletepolicy @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -262,7 +262,7 @@ func (r DeletePolicy) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +298,47 @@ func (r *DeletePolicy) _name(name string) *DeletePolicy { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeletePolicy) ErrorTrace(errortrace bool) *DeletePolicy { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *DeletePolicy) FilterPath(filterpaths ...string) *DeletePolicy { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeletePolicy) Human(human bool) *DeletePolicy { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeletePolicy) Pretty(pretty bool) *DeletePolicy { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/enrich/deletepolicy/response.go b/typedapi/enrich/deletepolicy/response.go index 1642583a16..334d62f98c 100644 --- a/typedapi/enrich/deletepolicy/response.go +++ b/typedapi/enrich/deletepolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletepolicy // Response holds the response body struct for the package deletepolicy // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/delete_policy/DeleteEnrichPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/delete_policy/DeleteEnrichPolicyResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/enrich/executepolicy/execute_policy.go b/typedapi/enrich/executepolicy/execute_policy.go index 955378ded8..6a15bb9151 100644 --- a/typedapi/enrich/executepolicy/execute_policy.go +++ b/typedapi/enrich/executepolicy/execute_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates the enrich index for an existing enrich policy. package executepolicy @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -265,7 +264,7 @@ func (r ExecutePolicy) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -310,3 +309,47 @@ func (r *ExecutePolicy) WaitForCompletion(waitforcompletion bool) *ExecutePolicy return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *ExecutePolicy) ErrorTrace(errortrace bool) *ExecutePolicy { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ExecutePolicy) FilterPath(filterpaths ...string) *ExecutePolicy { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ExecutePolicy) Human(human bool) *ExecutePolicy { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ExecutePolicy) Pretty(pretty bool) *ExecutePolicy { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/enrich/executepolicy/response.go b/typedapi/enrich/executepolicy/response.go index ca999ec235..928e8b72d9 100644 --- a/typedapi/enrich/executepolicy/response.go +++ b/typedapi/enrich/executepolicy/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package executepolicy @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package executepolicy // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/execute_policy/ExecuteEnrichPolicyResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/execute_policy/ExecuteEnrichPolicyResponse.ts#L23-L28 type Response struct { Status *types.ExecuteEnrichPolicyStatus `json:"status,omitempty"` TaskId types.TaskId `json:"task_id,omitempty"` diff --git a/typedapi/enrich/getpolicy/get_policy.go b/typedapi/enrich/getpolicy/get_policy.go index a33d418cc6..5a7e76dc60 100644 --- a/typedapi/enrich/getpolicy/get_policy.go +++ b/typedapi/enrich/getpolicy/get_policy.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Gets information about an enrich policy. +// Returns information about an enrich policy. package getpolicy import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -74,7 +74,7 @@ func NewGetPolicyFunc(tp elastictransport.Interface) NewGetPolicy { } } -// Gets information about an enrich policy. +// Returns information about an enrich policy. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-enrich-policy-api.html func New(tp elastictransport.Interface) *GetPolicy { @@ -267,7 +267,7 @@ func (r GetPolicy) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -304,3 +304,47 @@ func (r *GetPolicy) Name(name string) *GetPolicy { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetPolicy) ErrorTrace(errortrace bool) *GetPolicy { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetPolicy) FilterPath(filterpaths ...string) *GetPolicy { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetPolicy) Human(human bool) *GetPolicy { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetPolicy) Pretty(pretty bool) *GetPolicy { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/enrich/getpolicy/response.go b/typedapi/enrich/getpolicy/response.go index 1730088ebd..9fc4f04af9 100644 --- a/typedapi/enrich/getpolicy/response.go +++ b/typedapi/enrich/getpolicy/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getpolicy @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/get_policy/GetEnrichPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/get_policy/GetEnrichPolicyResponse.ts#L22-L24 type Response struct { Policies []types.Summary `json:"policies"` } diff --git a/typedapi/enrich/putpolicy/put_policy.go b/typedapi/enrich/putpolicy/put_policy.go index 03568be2be..21b9464699 100644 --- a/typedapi/enrich/putpolicy/put_policy.go +++ b/typedapi/enrich/putpolicy/put_policy.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates a new enrich policy. 
+// Creates an enrich policy. package putpolicy import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +81,7 @@ func NewPutPolicyFunc(tp elastictransport.Interface) NewPutPolicy { } } -// Creates a new enrich policy. +// Creates an enrich policy. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-enrich-policy-api.html func New(tp elastictransport.Interface) *PutPolicy { @@ -314,6 +315,50 @@ func (r *PutPolicy) _name(name string) *PutPolicy { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutPolicy) ErrorTrace(errortrace bool) *PutPolicy { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutPolicy) FilterPath(filterpaths ...string) *PutPolicy { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutPolicy) Human(human bool) *PutPolicy { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutPolicy) Pretty(pretty bool) *PutPolicy { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // GeoMatch Matches enrich data to incoming documents based on a `geo_shape` query. // API name: geo_match func (r *PutPolicy) GeoMatch(geomatch *types.EnrichPolicy) *PutPolicy { diff --git a/typedapi/enrich/putpolicy/request.go b/typedapi/enrich/putpolicy/request.go index 60ee8d6c4d..8a7ec3793d 100644 --- a/typedapi/enrich/putpolicy/request.go +++ b/typedapi/enrich/putpolicy/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putpolicy @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/put_policy/PutEnrichPolicyRequest.ts#L24-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/put_policy/PutEnrichPolicyRequest.ts#L24-L52 type Request struct { // GeoMatch Matches enrich data to incoming documents based on a `geo_shape` query. 
@@ -44,6 +44,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/enrich/putpolicy/response.go b/typedapi/enrich/putpolicy/response.go index f4c514ce16..f956f8cd2b 100644 --- a/typedapi/enrich/putpolicy/response.go +++ b/typedapi/enrich/putpolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putpolicy // Response holds the response body struct for the package putpolicy // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/put_policy/PutEnrichPolicyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/put_policy/PutEnrichPolicyResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/enrich/stats/response.go b/typedapi/enrich/stats/response.go index 621b4cd634..9451019e95 100644 --- a/typedapi/enrich/stats/response.go +++ b/typedapi/enrich/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/stats/EnrichStatsResponse.ts#L22-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/stats/EnrichStatsResponse.ts#L22-L39 type Response struct { // CacheStats Objects containing information about the enrich cache stats on each ingest diff --git a/typedapi/enrich/stats/stats.go b/typedapi/enrich/stats/stats.go index 4d30d31053..a67050aa7a 100644 --- a/typedapi/enrich/stats/stats.go +++ b/typedapi/enrich/stats/stats.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Gets enrich coordinator statistics and information about enrich policies that -// are currently executing. +// Returns enrich coordinator statistics and information about enrich policies +// that are currently executing. package stats import ( @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -69,8 +69,8 @@ func NewStatsFunc(tp elastictransport.Interface) NewStats { } } -// Gets enrich coordinator statistics and information about enrich policies that -// are currently executing. +// Returns enrich coordinator statistics and information about enrich policies +// that are currently executing. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/enrich-stats-api.html func New(tp elastictransport.Interface) *Stats { @@ -250,7 +250,7 @@ func (r Stats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -277,3 +277,47 @@ func (r *Stats) Header(key, value string) *Stats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Stats) ErrorTrace(errortrace bool) *Stats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Stats) FilterPath(filterpaths ...string) *Stats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Stats) Human(human bool) *Stats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Stats) Pretty(pretty bool) *Stats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/eql/delete/delete.go b/typedapi/eql/delete/delete.go index 807d81a2f3..ed6b6e5c1e 100644 --- a/typedapi/eql/delete/delete.go +++ b/typedapi/eql/delete/delete.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes an async EQL search by ID. If the search is still running, the search -// request will be cancelled. Otherwise, the saved search results are deleted. +// Deletes an async EQL search or a stored synchronous EQL search. +// The API also deletes results for the search. package delete import ( @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,8 +77,8 @@ func NewDeleteFunc(tp elastictransport.Interface) NewDelete { } } -// Deletes an async EQL search by ID. If the search is still running, the search -// request will be cancelled. Otherwise, the saved search results are deleted. +// Deletes an async EQL search or a stored synchronous EQL search. +// The API also deletes results for the search. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/eql-search-api.html func New(tp elastictransport.Interface) *Delete { @@ -264,7 +264,7 @@ func (r Delete) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -303,3 +303,47 @@ func (r *Delete) _id(id string) *Delete { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Delete) ErrorTrace(errortrace bool) *Delete { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Delete) FilterPath(filterpaths ...string) *Delete { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Delete) Human(human bool) *Delete { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Delete) Pretty(pretty bool) *Delete { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/eql/delete/response.go b/typedapi/eql/delete/response.go index 9123df8007..792a5c7091 100644 --- a/typedapi/eql/delete/response.go +++ b/typedapi/eql/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/eql/delete/EqlDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/eql/delete/EqlDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/eql/get/get.go b/typedapi/eql/get/get.go index a1dea314ad..1e1fa4b0be 100644 --- a/typedapi/eql/get/get.go +++ b/typedapi/eql/get/get.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns async results from previously executed Event Query Language (EQL) -// search +// Returns the current status and available results for an async EQL search or a +// stored synchronous EQL search. 
package get import ( @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,8 +77,8 @@ func NewGetFunc(tp elastictransport.Interface) NewGet { } } -// Returns async results from previously executed Event Query Language (EQL) -// search +// Returns the current status and available results for an async EQL search or a +// stored synchronous EQL search. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-async-eql-search-api.html func New(tp elastictransport.Interface) *Get { @@ -264,7 +264,7 @@ func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -319,3 +319,47 @@ func (r *Get) WaitForCompletionTimeout(duration string) *Get { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Get) ErrorTrace(errortrace bool) *Get { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Get) FilterPath(filterpaths ...string) *Get { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Get) Human(human bool) *Get { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Get) Pretty(pretty bool) *Get { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/eql/get/response.go b/typedapi/eql/get/response.go index b813f4aef5..6623d2a8e2 100644 --- a/typedapi/eql/get/response.go +++ b/typedapi/eql/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/eql/get/EqlGetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/eql/get/EqlGetResponse.ts#L22-L24 type Response struct { // Hits Contains matching events and sequences. Also contains related metadata. 
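Note: across the regenerated endpoints above (dangling indices, enrich, EQL), the recurring addition is a set of four chainable setters for the common query parameters error_trace, filter_path, human and pretty, alongside the io/ioutil to io.Discard cleanup. A minimal usage sketch for one of the touched builders follows; the cluster address, policy name and filter path are illustrative, and the transport construction assumes the elastic-transport-go elastictransport.New constructor, which is not part of this patch.

package main

import (
	"context"
	"io"
	"log"
	"net/url"
	"os"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/enrich/getpolicy"
)

func main() {
	u, _ := url.Parse("http://localhost:9200") // illustrative cluster address
	tp, err := elastictransport.New(elastictransport.Config{URLs: []*url.URL{u}})
	if err != nil {
		log.Fatal(err)
	}

	// The new setters are plain query-string helpers, so they chain like any
	// other builder method on the generated request types.
	res, err := getpolicy.New(tp).
		Name("users-policy").            // illustrative policy name
		Human(true).                     // human-readable units in the response
		Pretty(true).                    // pretty-printed JSON, handy when debugging
		FilterPath("policies.*.config"). // trim the response to the fields of interest
		ErrorTrace(false).
		Perform(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	defer res.Body.Close()
	io.Copy(os.Stdout, res.Body)
}

The same four setters compose identically on every other request type regenerated in this patch.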
diff --git a/typedapi/eql/getstatus/get_status.go b/typedapi/eql/getstatus/get_status.go index 7b62b394c3..9f773b7f42 100644 --- a/typedapi/eql/getstatus/get_status.go +++ b/typedapi/eql/getstatus/get_status.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns the status of a previously submitted async or stored Event Query -// Language (EQL) search +// Returns the current status for an async EQL search or a stored synchronous +// EQL search without returning results. package getstatus import ( @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,8 +77,8 @@ func NewGetStatusFunc(tp elastictransport.Interface) NewGetStatus { } } -// Returns the status of a previously submitted async or stored Event Query -// Language (EQL) search +// Returns the current status for an async EQL search or a stored synchronous +// EQL search without returning results. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-async-eql-status-api.html func New(tp elastictransport.Interface) *GetStatus { @@ -266,7 +266,7 @@ func (r GetStatus) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -302,3 +302,47 @@ func (r *GetStatus) _id(id string) *GetStatus { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetStatus) ErrorTrace(errortrace bool) *GetStatus { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetStatus) FilterPath(filterpaths ...string) *GetStatus { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetStatus) Human(human bool) *GetStatus { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetStatus) Pretty(pretty bool) *GetStatus { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/eql/getstatus/response.go b/typedapi/eql/getstatus/response.go index a03eba14e1..5975497a9e 100644 --- a/typedapi/eql/getstatus/response.go +++ b/typedapi/eql/getstatus/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getstatus // Response holds the response body struct for the package getstatus // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/eql/get_status/EqlGetStatusResponse.ts#L24-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/eql/get_status/EqlGetStatusResponse.ts#L24-L51 type Response struct { // CompletionStatus For a completed search shows the http status code of the completed search. diff --git a/typedapi/eql/search/request.go b/typedapi/eql/search/request.go index 2433634add..266645c1ff 100644 --- a/typedapi/eql/search/request.go +++ b/typedapi/eql/search/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package search @@ -34,7 +34,7 @@ import ( // Request holds the request body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/eql/search/EqlSearchRequest.ts#L28-L118 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/eql/search/EqlSearchRequest.ts#L28-L118 type Request struct { CaseSensitive *bool `json:"case_sensitive,omitempty"` // EventCategoryField Field containing the event classification, such as process, file, or network. @@ -66,6 +66,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -96,7 +97,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "case_sensitive": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -157,7 +158,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "keep_on_completion": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/eql/search/response.go b/typedapi/eql/search/response.go index 053f75544a..04dbd2146f 100644 --- a/typedapi/eql/search/response.go +++ b/typedapi/eql/search/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package search @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/eql/search/EqlSearchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/eql/search/EqlSearchResponse.ts#L22-L24 type Response struct { // Hits Contains matching events and sequences. Also contains related metadata. 
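Note: besides regenerating the EQL search package below with the same common parameters, this patch introduces new ES|QL conveniences, in particular typedapi/esql/query/helpers.go further down, whose generic Helper maps the row-oriented ES|QL JSON response onto a caller-defined struct. The following is a hedged sketch of how it might be consumed; the typed-client setup, the "books" index, the struct fields, and the es.Esql.Query().Query(...) builder chain are assumptions based on the typed API's usual conventions, not something shown in this diff.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/esql/query"
)

// Book mirrors the columns the ES|QL query returns; the field names below are
// illustrative and need to match your own index mapping.
type Book struct {
	Author      string `json:"author"`
	Name        string `json:"name"`
	PageCount   int    `json:"page_count"`
	ReleaseDate string `json:"release_date"`
}

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"}, // illustrative cluster address
	})
	if err != nil {
		log.Fatal(err)
	}

	// Helper runs the request with columnar=false and format=json, then unmarshals
	// each row into a Book using the column names as JSON keys.
	// es.Esql.Query() and its Query(...) body setter are assumed here from the
	// typed API conventions; only query.Helper itself is defined in this patch.
	books, err := query.Helper[Book](
		context.Background(),
		es.Esql.Query().Query(`FROM books | LIMIT 10`), // assumes a "books" index
	)
	if err != nil {
		log.Fatal(err)
	}
	for _, b := range books {
		fmt.Printf("%s: %s (%d pages)\n", b.Author, b.Name, b.PageCount)
	}
}

For large result sets, NewIteratorHelper (also added in helpers.go) offers the same mapping one row at a time instead of materializing the whole slice.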
diff --git a/typedapi/eql/search/search.go b/typedapi/eql/search/search.go index e4671aae16..bfb10003af 100644 --- a/typedapi/eql/search/search.go +++ b/typedapi/eql/search/search.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns results matching a query expressed in Event Query Language (EQL) package search @@ -343,6 +343,50 @@ func (r *Search) IgnoreUnavailable(ignoreunavailable bool) *Search { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Search) ErrorTrace(errortrace bool) *Search { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Search) FilterPath(filterpaths ...string) *Search { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Search) Human(human bool) *Search { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Search) Pretty(pretty bool) *Search { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: case_sensitive func (r *Search) CaseSensitive(casesensitive bool) *Search { r.req.CaseSensitive = &casesensitive diff --git a/typedapi/esql/asyncquery/async_query.go b/typedapi/esql/asyncquery/async_query.go new file mode 100644 index 0000000000..e5be9fc83f --- /dev/null +++ b/typedapi/esql/asyncquery/async_query.go @@ -0,0 +1,231 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. 
+// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Executes an ESQL request asynchronously +package asyncquery + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type AsyncQuery struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewAsyncQuery type alias for index. +type NewAsyncQuery func() *AsyncQuery + +// NewAsyncQueryFunc returns a new instance of AsyncQuery with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewAsyncQueryFunc(tp elastictransport.Interface) NewAsyncQuery { + return func() *AsyncQuery { + n := New(tp) + + return n + } +} + +// Executes an ESQL request asynchronously +// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-async-query-api.html +func New(tp elastictransport.Interface) *AsyncQuery { + r := &AsyncQuery{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *AsyncQuery) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_query") + path.WriteString("/") + path.WriteString("async") + + method = http.MethodPost + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Content-Type") == "" { + if r.raw != nil { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
+func (r AsyncQuery) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "esql.async_query") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "esql.async_query") + if reader := instrument.RecordRequestBody(ctx, "esql.async_query", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "esql.async_query") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the AsyncQuery query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a asyncquery.Response +func (r AsyncQuery) Do(ctx context.Context) (bool, error) { + return r.IsSuccess(ctx) +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r AsyncQuery) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "esql.async_query") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the AsyncQuery query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the AsyncQuery headers map. +func (r *AsyncQuery) Header(key, value string) *AsyncQuery { + r.headers.Set(key, value) + + return r +} diff --git a/typedapi/esql/query/helpers.go b/typedapi/esql/query/helpers.go new file mode 100644 index 0000000000..bde2cb2c4e --- /dev/null +++ b/typedapi/esql/query/helpers.go @@ -0,0 +1,191 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package query + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" +) + +type metadata struct { + Name string `json:"name"` + Type string `json:"type"` +} + +type esqlResponse struct { + Columns []metadata `json:"columns"` + Values [][]any `json:"values"` +} + +// Helper takes a generic type T, a context.Context and an esql.Query request. +// Returns an array of T using the json.Unmarshaler of the type. +func Helper[T any](ctx context.Context, esqlQuery *Query) ([]T, error) { + response, err := esqlQuery. + Columnar(false). + Format("json"). + Header("x-elastic-client-meta", "h=qo"). + Do(ctx) + if err != nil { + return nil, err + } + + var eR esqlResponse + err = json.Unmarshal(response, &eR) + if err != nil { + return nil, fmt.Errorf("cannot read ES|QL response: %w", err) + } + + buf := bytes.NewBuffer(nil) + buf.WriteByte('[') + for rowNum, row := range eR.Values { + buf.WriteByte('{') + for i := 0; i < len(row); i++ { + buf.WriteString(`"` + eR.Columns[i].Name + `":`) + data, err := json.Marshal(row[i]) + if err != nil { + return nil, fmt.Errorf("error while parsing ES|QL response: %w", err) + } + buf.Write(data) + if i != len(row)-1 { + buf.WriteByte(',') + } + } + buf.WriteByte('}') + if rowNum != len(eR.Values)-1 { + buf.WriteByte(',') + } + } + buf.WriteByte(']') + + target := []T{} + err = json.Unmarshal(buf.Bytes(), &target) + if err != nil { + return nil, fmt.Errorf("cannot deserialize ES|QL response: %w", err) + } + + return target, nil +} + +type EsqlIterator[T any] interface { + Next() (*T, error) + More() bool +} + +type iterator[T any] struct { + reader []byte + decoder *json.Decoder + keys []string + skipComma bool +} + +func (d iterator[T]) More() bool { + return d.decoder.More() +} + +func (d iterator[T]) Next() (*T, error) { + var t T + var tmp []any + + if d.skipComma { + d.decoder.Token() + } + + err := d.decoder.Decode(&tmp) + if err != nil { + return nil, err + } + + buf := bytes.Buffer{} + + buf.WriteByte('{') + for index, key := range d.keys { + buf.WriteString(`"` + key + `":`) + value, _ := json.Marshal(tmp[index]) + buf.Write(value) + + if index != len(d.keys)-1 { + buf.WriteByte(',') + } + } + buf.WriteByte('}') + + err = json.Unmarshal(buf.Bytes(), &t) + if err != nil { + return nil, err + } + + d.skipComma = true + return &t, nil +} + +// Helper takes a generic type T, a context.Context and an esql.Query request +// buffer the response and provides an API to consume one item at a time. +func NewIteratorHelper[T any](ctx context.Context, query *Query) (EsqlIterator[T], error) { + response, err := query. + Columnar(false). + Format("json"). + Header("x-elastic-client-meta", "h=qo"). 
+ Perform(ctx) + if err != nil { + return nil, err + } + defer response.Body.Close() + + d := &iterator[T]{} + d.reader, err = io.ReadAll(response.Body) + d.decoder = json.NewDecoder(bytes.NewReader(d.reader)) + + var metas []metadata +OUTER: + for { + t, err := d.decoder.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return nil, err + } + switch t { + case "columns": + err := d.decoder.Decode(&metas) + if err != nil { + return nil, err + } + for _, m := range metas { + d.keys = append(d.keys, m.Name) + } + + case "values": + t, _ := d.decoder.Token() + if t != json.Delim(91) { + return nil, fmt.Errorf("cannot read response from ES|QL, expected ARRAY_START: %w", err) + } + break OUTER + } + } + + if err != nil { + return nil, fmt.Errorf("cannot read response from ES|QL: %w", err) + } + + return d, nil +} diff --git a/typedapi/esql/query/helpers_test.go b/typedapi/esql/query/helpers_test.go new file mode 100644 index 0000000000..675af09345 --- /dev/null +++ b/typedapi/esql/query/helpers_test.go @@ -0,0 +1,508 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package query + +import ( + "bytes" + "context" + "fmt" + "io" + "net/http" + "net/url" + "reflect" + "strings" + "testing" +) + +type Doc struct { + Author string `json:"author,omitempty"` + Name string `json:"name,omitempty"` + PageCount int `json:"page_count,omitempty"` + ReleaseDate string `json:"release_date,omitempty"` +} + +var testPayload = `{ + "columns": [ + { + "name": "author", + "type": "text" + }, + { + "name": "author.keyword", + "type": "keyword" + }, + { + "name": "name", + "type": "text" + }, + { + "name": "name.keyword", + "type": "keyword" + }, + { + "name": "page_count", + "type": "long" + }, + { + "name": "release_date", + "type": "date" + } + ], + "values": [ + [ + "James S.A. Corey", + "James S.A. Corey", + "Leviathan Wakes", + "Leviathan Wakes", + 561, + "2011-06-02T00:00:00.000Z" + ], + [ + "Dan Simmons", + "Dan Simmons", + "Hyperion", + "Hyperion", + 482, + "1989-05-26T00:00:00.000Z" + ], + [ + "Frank Herbert", + "Frank Herbert", + "Dune", + "Dune", + 604, + "1965-06-01T00:00:00.000Z" + ], + [ + "Frank Herbert", + "Frank Herbert", + "Dune Messiah", + "Dune Messiah", + 331, + "1969-10-15T00:00:00.000Z" + ], + [ + "Frank Herbert", + "Frank Herbert", + "Children of Dune", + "Children of Dune", + 408, + "1976-04-21T00:00:00.000Z" + ], + [ + "Frank Herbert", + "Frank Herbert", + "God Emperor of Dune", + "God Emperor of Dune", + 454, + "1981-05-28T00:00:00.000Z" + ], + [ + "Iain M. Banks", + "Iain M. Banks", + "Consider Phlebas", + "Consider Phlebas", + 471, + "1987-04-23T00:00:00.000Z" + ], + [ + "Peter F. Hamilton", + "Peter F. 
Hamilton", + "Pandora's Star", + "Pandora's Star", + 768, + "2004-03-02T00:00:00.000Z" + ], + [ + "Alastair Reynolds", + "Alastair Reynolds", + "Revelation Space", + "Revelation Space", + 585, + "2000-03-15T00:00:00.000Z" + ], + [ + "Vernor Vinge", + "Vernor Vinge", + "A Fire Upon the Deep", + "A Fire Upon the Deep", + 613, + "1992-06-01T00:00:00.000Z" + ], + [ + "Orson Scott Card", + "Orson Scott Card", + "Ender's Game", + "Ender's Game", + 324, + "1985-06-01T00:00:00.000Z" + ], + [ + "George Orwell", + "George Orwell", + "1984", + "1984", + 328, + "1985-06-01T00:00:00.000Z" + ], + [ + "Ray Bradbury", + "Ray Bradbury", + "Fahrenheit 451", + "Fahrenheit 451", + 227, + "1953-10-15T00:00:00.000Z" + ], + [ + "Aldous Huxley", + "Aldous Huxley", + "Brave New World", + "Brave New World", + 268, + "1932-06-01T00:00:00.000Z" + ], + [ + "Isaac Asimov", + "Isaac Asimov", + "Foundation", + "Foundation", + 224, + "1951-06-01T00:00:00.000Z" + ], + [ + "Lois Lowry", + "Lois Lowry", + "The Giver", + "The Giver", + 208, + "1993-04-26T00:00:00.000Z" + ], + [ + "Kurt Vonnegut", + "Kurt Vonnegut", + "Slaughterhouse-Five", + "Slaughterhouse-Five", + 275, + "1969-06-01T00:00:00.000Z" + ], + [ + "Douglas Adams", + "Douglas Adams", + "The Hitchhiker's Guide to the Galaxy", + "The Hitchhiker's Guide to the Galaxy", + 180, + "1979-10-12T00:00:00.000Z" + ], + [ + "Neal Stephenson", + "Neal Stephenson", + "Snow Crash", + "Snow Crash", + 470, + "1992-06-01T00:00:00.000Z" + ], + [ + "William Gibson", + "William Gibson", + "Neuromancer", + "Neuromancer", + 271, + "1984-07-01T00:00:00.000Z" + ], + [ + "Margaret Atwood", + "Margaret Atwood", + "The Handmaid's Tale", + "The Handmaid's Tale", + 311, + "1985-06-01T00:00:00.000Z" + ], + [ + "Robert A. Heinlein", + "Robert A. Heinlein", + "Starship Troopers", + "Starship Troopers", + 335, + "1959-12-01T00:00:00.000Z" + ], + [ + "Ursula K. Le Guin", + "Ursula K. Le Guin", + "The Left Hand of Darkness", + "The Left Hand of Darkness", + 304, + "1969-06-01T00:00:00.000Z" + ], + [ + "Robert A. Heinlein", + "Robert A. 
Heinlein", + "The Moon is a Harsh Mistress", + "The Moon is a Harsh Mistress", + 288, + "1966-04-01T00:00:00.000Z" + ] + ] +}` + +type mockTransp struct { + RoundTripFunc func(*http.Request) (*http.Response, error) +} + +var successfullRoundTripFunc = func(*http.Request) (*http.Response, error) { + res := &http.Response{} + res.Header = http.Header{} + res.Header.Add("Content-Type", "application/json") + res.Body = io.NopCloser(strings.NewReader(testPayload)) + + return res, nil +} + +var badPayloadRoundTripFunc = func(*http.Request) (*http.Response, error) { + res := &http.Response{} + res.Header = http.Header{} + res.Header.Add("Content-Type", "application/json") + res.Body = io.NopCloser(strings.NewReader(`{ "columns":`)) + + return res, nil +} + +var customErrorRoundTripFunc = func(*http.Request) (*http.Response, error) { + return nil, fmt.Errorf("something really bad happened") +} + +func (t mockTransp) Perform(request *http.Request) (*http.Response, error) { + if t.RoundTripFunc == nil { + return successfullRoundTripFunc(request) + } + return t.RoundTripFunc(request) +} + +func TestHelper(t *testing.T) { + type args struct { + ctx context.Context + esqlQuery *Query + } + type testCase[T any] struct { + name string + args args + want []T + wantErr bool + } + tests := []testCase[Doc]{ + { + name: "Simple deserialization", + args: args{ + ctx: context.Background(), + esqlQuery: &Query{ + transport: mockTransp{}, + values: make(url.Values), + headers: make(http.Header), + buf: bytes.NewBuffer(nil), + req: &Request{ + Query: `FROM docs`, + }, + }, + }, + want: []Doc{ + {Author: "James S.A. Corey", Name: "Leviathan Wakes", PageCount: 561, ReleaseDate: "2011-06-02T00:00:00.000Z"}, + {Author: "Dan Simmons", Name: "Hyperion", PageCount: 482, ReleaseDate: "1989-05-26T00:00:00.000Z"}, + {Author: "Frank Herbert", Name: "Dune", PageCount: 604, ReleaseDate: "1965-06-01T00:00:00.000Z"}, + {Author: "Frank Herbert", Name: "Dune Messiah", PageCount: 331, ReleaseDate: "1969-10-15T00:00:00.000Z"}, + {Author: "Frank Herbert", Name: "Children of Dune", PageCount: 408, ReleaseDate: "1976-04-21T00:00:00.000Z"}, + {Author: "Frank Herbert", Name: "God Emperor of Dune", PageCount: 454, ReleaseDate: "1981-05-28T00:00:00.000Z"}, + {Author: "Iain M. Banks", Name: "Consider Phlebas", PageCount: 471, ReleaseDate: "1987-04-23T00:00:00.000Z"}, + {Author: "Peter F. 
Hamilton", Name: "Pandora's Star", PageCount: 768, ReleaseDate: "2004-03-02T00:00:00.000Z"}, + {Author: "Alastair Reynolds", Name: "Revelation Space", PageCount: 585, ReleaseDate: "2000-03-15T00:00:00.000Z"}, + {Author: "Vernor Vinge", Name: "A Fire Upon the Deep", PageCount: 613, ReleaseDate: "1992-06-01T00:00:00.000Z"}, + {Author: "Orson Scott Card", Name: "Ender's Game", PageCount: 324, ReleaseDate: "1985-06-01T00:00:00.000Z"}, + {Author: "George Orwell", Name: "1984", PageCount: 328, ReleaseDate: "1985-06-01T00:00:00.000Z"}, + {Author: "Ray Bradbury", Name: "Fahrenheit 451", PageCount: 227, ReleaseDate: "1953-10-15T00:00:00.000Z"}, + {Author: "Aldous Huxley", Name: "Brave New World", PageCount: 268, ReleaseDate: "1932-06-01T00:00:00.000Z"}, + {Author: "Isaac Asimov", Name: "Foundation", PageCount: 224, ReleaseDate: "1951-06-01T00:00:00.000Z"}, + {Author: "Lois Lowry", Name: "The Giver", PageCount: 208, ReleaseDate: "1993-04-26T00:00:00.000Z"}, + {Author: "Kurt Vonnegut", Name: "Slaughterhouse-Five", PageCount: 275, ReleaseDate: "1969-06-01T00:00:00.000Z"}, + {Author: "Douglas Adams", Name: "The Hitchhiker's Guide to the Galaxy", PageCount: 180, ReleaseDate: "1979-10-12T00:00:00.000Z"}, + {Author: "Neal Stephenson", Name: "Snow Crash", PageCount: 470, ReleaseDate: "1992-06-01T00:00:00.000Z"}, + {Author: "William Gibson", Name: "Neuromancer", PageCount: 271, ReleaseDate: "1984-07-01T00:00:00.000Z"}, + {Author: "Margaret Atwood", Name: "The Handmaid's Tale", PageCount: 311, ReleaseDate: "1985-06-01T00:00:00.000Z"}, + {Author: "Robert A. Heinlein", Name: "Starship Troopers", PageCount: 335, ReleaseDate: "1959-12-01T00:00:00.000Z"}, + {Author: "Ursula K. Le Guin", Name: "The Left Hand of Darkness", PageCount: 304, ReleaseDate: "1969-06-01T00:00:00.000Z"}, + {Author: "Robert A. Heinlein", Name: "The Moon is a Harsh Mistress", PageCount: 288, ReleaseDate: "1966-04-01T00:00:00.000Z"}, + }, + wantErr: false, + }, + { + name: "helper failure on bad response", + args: args{ + ctx: context.Background(), + esqlQuery: &Query{ + transport: mockTransp{badPayloadRoundTripFunc}, + values: make(url.Values), + headers: make(http.Header), + buf: bytes.NewBuffer(nil), + req: &Request{ + Query: `FROM docs`, + }, + }, + }, + wantErr: true, + }, + { + name: "helper failure on error", + args: args{ + ctx: context.Background(), + esqlQuery: &Query{ + transport: mockTransp{customErrorRoundTripFunc}, + values: make(url.Values), + headers: make(http.Header), + buf: bytes.NewBuffer(nil), + req: &Request{ + Query: `FROM docs`, + }, + }, + }, + wantErr: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, err := Helper[Doc](tt.args.ctx, tt.args.esqlQuery) + if (err != nil) != tt.wantErr { + t.Errorf("Helper() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("Helper() got = %v, want %v", got, tt.want) + } + }) + } +} + +func TestNewIteratorHelper(t *testing.T) { + type args struct { + ctx context.Context + query *Query + } + type testCase[T any] struct { + name string + args args + want []Doc + wantErr bool + } + tests := []testCase[Doc]{ + { + name: "Simple iterator", + args: args{ + ctx: context.Background(), + query: &Query{ + transport: mockTransp{}, + values: make(url.Values), + headers: make(http.Header), + buf: bytes.NewBuffer(nil), + req: &Request{ + Query: `FROM docs`, + }, + }, + }, + want: []Doc{ + {Author: "James S.A. 
Corey", Name: "Leviathan Wakes", PageCount: 561, ReleaseDate: "2011-06-02T00:00:00.000Z"}, + {Author: "Dan Simmons", Name: "Hyperion", PageCount: 482, ReleaseDate: "1989-05-26T00:00:00.000Z"}, + {Author: "Frank Herbert", Name: "Dune", PageCount: 604, ReleaseDate: "1965-06-01T00:00:00.000Z"}, + {Author: "Frank Herbert", Name: "Dune Messiah", PageCount: 331, ReleaseDate: "1969-10-15T00:00:00.000Z"}, + {Author: "Frank Herbert", Name: "Children of Dune", PageCount: 408, ReleaseDate: "1976-04-21T00:00:00.000Z"}, + {Author: "Frank Herbert", Name: "God Emperor of Dune", PageCount: 454, ReleaseDate: "1981-05-28T00:00:00.000Z"}, + {Author: "Iain M. Banks", Name: "Consider Phlebas", PageCount: 471, ReleaseDate: "1987-04-23T00:00:00.000Z"}, + {Author: "Peter F. Hamilton", Name: "Pandora's Star", PageCount: 768, ReleaseDate: "2004-03-02T00:00:00.000Z"}, + {Author: "Alastair Reynolds", Name: "Revelation Space", PageCount: 585, ReleaseDate: "2000-03-15T00:00:00.000Z"}, + {Author: "Vernor Vinge", Name: "A Fire Upon the Deep", PageCount: 613, ReleaseDate: "1992-06-01T00:00:00.000Z"}, + {Author: "Orson Scott Card", Name: "Ender's Game", PageCount: 324, ReleaseDate: "1985-06-01T00:00:00.000Z"}, + {Author: "George Orwell", Name: "1984", PageCount: 328, ReleaseDate: "1985-06-01T00:00:00.000Z"}, + {Author: "Ray Bradbury", Name: "Fahrenheit 451", PageCount: 227, ReleaseDate: "1953-10-15T00:00:00.000Z"}, + {Author: "Aldous Huxley", Name: "Brave New World", PageCount: 268, ReleaseDate: "1932-06-01T00:00:00.000Z"}, + {Author: "Isaac Asimov", Name: "Foundation", PageCount: 224, ReleaseDate: "1951-06-01T00:00:00.000Z"}, + {Author: "Lois Lowry", Name: "The Giver", PageCount: 208, ReleaseDate: "1993-04-26T00:00:00.000Z"}, + {Author: "Kurt Vonnegut", Name: "Slaughterhouse-Five", PageCount: 275, ReleaseDate: "1969-06-01T00:00:00.000Z"}, + {Author: "Douglas Adams", Name: "The Hitchhiker's Guide to the Galaxy", PageCount: 180, ReleaseDate: "1979-10-12T00:00:00.000Z"}, + {Author: "Neal Stephenson", Name: "Snow Crash", PageCount: 470, ReleaseDate: "1992-06-01T00:00:00.000Z"}, + {Author: "William Gibson", Name: "Neuromancer", PageCount: 271, ReleaseDate: "1984-07-01T00:00:00.000Z"}, + {Author: "Margaret Atwood", Name: "The Handmaid's Tale", PageCount: 311, ReleaseDate: "1985-06-01T00:00:00.000Z"}, + {Author: "Robert A. Heinlein", Name: "Starship Troopers", PageCount: 335, ReleaseDate: "1959-12-01T00:00:00.000Z"}, + {Author: "Ursula K. Le Guin", Name: "The Left Hand of Darkness", PageCount: 304, ReleaseDate: "1969-06-01T00:00:00.000Z"}, + {Author: "Robert A. 
Heinlein", Name: "The Moon is a Harsh Mistress", PageCount: 288, ReleaseDate: "1966-04-01T00:00:00.000Z"}, + }, + wantErr: false, + }, + { + name: "Iterator failure on bad response", + args: args{ + ctx: context.Background(), + query: &Query{ + transport: mockTransp{badPayloadRoundTripFunc}, + values: make(url.Values), + headers: make(http.Header), + buf: bytes.NewBuffer(nil), + req: &Request{ + Query: `FROM docs`, + }, + }, + }, + wantErr: true, + }, + { + name: "Iterator failure on error", + args: args{ + ctx: context.Background(), + query: &Query{ + transport: mockTransp{customErrorRoundTripFunc}, + values: make(url.Values), + headers: make(http.Header), + buf: bytes.NewBuffer(nil), + req: &Request{ + Query: `FROM docs`, + }, + }, + }, + wantErr: true, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + iter, err := NewIteratorHelper[Doc](tt.args.ctx, tt.args.query) + if (err != nil) != tt.wantErr { + t.Errorf("NewIteratorHelper() error = %v, wantErr %v", err, tt.wantErr) + return + } + + if tt.wantErr == false { + var i int + for iter.More() { + if doc, err := iter.Next(); err != nil { + t.Errorf("Iterator.Next() error = %v", err) + } else { + if reflect.DeepEqual(doc, tt.want[i]) { + t.Errorf("NewIteratorHelper() got = %v, want %v", doc, tt.want) + } + } + i++ + } + } + }) + } +} diff --git a/typedapi/esql/query/query.go b/typedapi/esql/query/query.go index ae7ee18e35..07d306cdda 100644 --- a/typedapi/esql/query/query.go +++ b/typedapi/esql/query/query.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Executes an ESQL request +// Executes an ES|QL request package query import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -72,7 +73,7 @@ func NewQueryFunc(tp elastictransport.Interface) NewQuery { } } -// Executes an ESQL request +// Executes an ES|QL request // // https://www.elastic.co/guide/en/elasticsearch/reference/current/esql-rest.html func New(tp elastictransport.Interface) *Query { @@ -306,6 +307,50 @@ func (r *Query) Delimiter(delimiter string) *Query { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Query) ErrorTrace(errortrace bool) *Query { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Query) FilterPath(filterpaths ...string) *Query { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *Query) Human(human bool) *Query { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Query) Pretty(pretty bool) *Query { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Columnar By default, ES|QL returns results as rows. For example, FROM returns each // individual document as one row. For the JSON, YAML, CBOR and smile formats, // ES|QL can return the results in a columnar fashion where one row represents diff --git a/typedapi/esql/query/request.go b/typedapi/esql/query/request.go index 3e883d591a..590386b662 100644 --- a/typedapi/esql/query/request.go +++ b/typedapi/esql/query/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package query @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package query // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/esql/query/QueryRequest.ts#L24-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/esql/query/QueryRequest.ts#L24-L65 type Request struct { // Columnar By default, ES|QL returns results as rows. For example, FROM returns each @@ -53,6 +53,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/esql/query/response.go b/typedapi/esql/query/response.go index 0c7416f269..9f429458a9 100644 --- a/typedapi/esql/query/response.go +++ b/typedapi/esql/query/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package query // Response holds the response body struct for the package query // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/esql/query/QueryResponse.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/esql/query/QueryResponse.ts#L22-L25 type Response = []byte diff --git a/typedapi/features/getfeatures/get_features.go b/typedapi/features/getfeatures/get_features.go index a4ae907050..0eaaf00755 100644 --- a/typedapi/features/getfeatures/get_features.go +++ b/typedapi/features/getfeatures/get_features.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
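As a test-style sketch of the flow the ES|QL helper tests above exercise (written as if it sat in the same package as those tests, since Query's fields are unexported there; the book query and the mocked transport are illustrative assumptions, not part of the diff):

func ExampleIteratorHelper() {
	// Same construction the tests use; application code would go through the
	// generated builder in typedapi/esql/query instead of filling the struct.
	q := &Query{
		transport: mockTransp{}, // serves the canned testPayload defined above
		values:    make(url.Values),
		headers:   make(http.Header),
		buf:       bytes.NewBuffer(nil),
		req:       &Request{Query: `FROM books | LIMIT 3`},
	}

	iter, err := NewIteratorHelper[Doc](context.Background(), q)
	if err != nil {
		fmt.Println("iterator error:", err)
		return
	}
	// More/Next stream one typed document per row of the ES|QL response.
	for iter.More() {
		doc, err := iter.Next()
		if err != nil {
			fmt.Println("decode error:", err)
			return
		}
		fmt.Printf("%s: %q (%d pages)\n", doc.Author, doc.Name, doc.PageCount)
	}
}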
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Gets a list of features which can be included in snapshots using the // feature_states field when creating a snapshot @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r GetFeatures) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *GetFeatures) Header(key, value string) *GetFeatures { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetFeatures) ErrorTrace(errortrace bool) *GetFeatures { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetFeatures) FilterPath(filterpaths ...string) *GetFeatures { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetFeatures) Human(human bool) *GetFeatures { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetFeatures) Pretty(pretty bool) *GetFeatures { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/features/getfeatures/response.go b/typedapi/features/getfeatures/response.go index 129bf560b7..e9a8f2f9e6 100644 --- a/typedapi/features/getfeatures/response.go +++ b/typedapi/features/getfeatures/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
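The ErrorTrace, FilterPath, Human and Pretty options added here follow the same chainable pattern on every generated builder. A minimal sketch on GetFeatures, assuming tp is an existing elastictransport.Interface (client wiring is outside this diff); IsSuccess is the generated method shown above:

package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/features/getfeatures"
)

// checkFeatures asks for a human-readable, pretty-printed response trimmed to
// the feature names only, and reports whether the call succeeded.
func checkFeatures(tp elastictransport.Interface) (bool, error) {
	return getfeatures.New(tp).
		Human(true).
		Pretty(true).
		FilterPath("features.name").
		IsSuccess(context.Background())
}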
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getfeatures @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getfeatures // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/features/get_features/GetFeaturesResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/features/get_features/GetFeaturesResponse.ts#L22-L26 type Response struct { Features []types.Feature `json:"features"` } diff --git a/typedapi/features/resetfeatures/reset_features.go b/typedapi/features/resetfeatures/reset_features.go index c0a0d6e452..48f42d2f54 100644 --- a/typedapi/features/resetfeatures/reset_features.go +++ b/typedapi/features/resetfeatures/reset_features.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Resets the internal state of features, usually by deleting system indices package resetfeatures @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r ResetFeatures) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *ResetFeatures) Header(key, value string) *ResetFeatures { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ResetFeatures) ErrorTrace(errortrace bool) *ResetFeatures { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ResetFeatures) FilterPath(filterpaths ...string) *ResetFeatures { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ResetFeatures) Human(human bool) *ResetFeatures { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *ResetFeatures) Pretty(pretty bool) *ResetFeatures { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/features/resetfeatures/response.go b/typedapi/features/resetfeatures/response.go index d2494d9c34..6ab6ef059f 100644 --- a/typedapi/features/resetfeatures/response.go +++ b/typedapi/features/resetfeatures/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package resetfeatures @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package resetfeatures // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/features/reset_features/ResetFeaturesResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/features/reset_features/ResetFeaturesResponse.ts#L22-L26 type Response struct { Features []types.Feature `json:"features"` } diff --git a/typedapi/fleet/globalcheckpoints/global_checkpoints.go b/typedapi/fleet/globalcheckpoints/global_checkpoints.go index 67de3503ae..55eef83666 100644 --- a/typedapi/fleet/globalcheckpoints/global_checkpoints.go +++ b/typedapi/fleet/globalcheckpoints/global_checkpoints.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns the current global checkpoints for an index. This API is design for // internal use by the fleet server project. @@ -28,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -271,7 +270,7 @@ func (r GlobalCheckpoints) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -353,3 +352,47 @@ func (r *GlobalCheckpoints) Timeout(duration string) *GlobalCheckpoints { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GlobalCheckpoints) ErrorTrace(errortrace bool) *GlobalCheckpoints { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GlobalCheckpoints) FilterPath(filterpaths ...string) *GlobalCheckpoints { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *GlobalCheckpoints) Human(human bool) *GlobalCheckpoints { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GlobalCheckpoints) Pretty(pretty bool) *GlobalCheckpoints { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/fleet/globalcheckpoints/response.go b/typedapi/fleet/globalcheckpoints/response.go index d736a05d6d..77e4b99712 100644 --- a/typedapi/fleet/globalcheckpoints/response.go +++ b/typedapi/fleet/globalcheckpoints/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package globalcheckpoints // Response holds the response body struct for the package globalcheckpoints // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/fleet/global_checkpoints/GlobalCheckpointsResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/fleet/global_checkpoints/GlobalCheckpointsResponse.ts#L22-L27 type Response struct { GlobalCheckpoints []int64 `json:"global_checkpoints"` TimedOut bool `json:"timed_out"` diff --git a/typedapi/fleet/msearch/msearch.go b/typedapi/fleet/msearch/msearch.go index ab381ce3f4..d51b7ac3ea 100644 --- a/typedapi/fleet/msearch/msearch.go +++ b/typedapi/fleet/msearch/msearch.go @@ -16,11 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Multi Search API where the search will only be executed after specified -// checkpoints are available due to a refresh. This API is designed for internal -// use by the fleet server project. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Executes several [fleet +// searches](https://www.elastic.co/guide/en/elasticsearch/reference/current/fleet-search.html) +// with a single API request. +// The API follows the same structure as the [multi +// search](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html) +// API. However, similar to the fleet search API, it +// supports the wait_for_checkpoints parameter. package msearch import ( @@ -83,9 +87,13 @@ func NewMsearchFunc(tp elastictransport.Interface) NewMsearch { } } -// Multi Search API where the search will only be executed after specified -// checkpoints are available due to a refresh. This API is designed for internal -// use by the fleet server project. +// Executes several [fleet +// searches](https://www.elastic.co/guide/en/elasticsearch/reference/current/fleet-search.html) +// with a single API request. +// The API follows the same structure as the [multi +// search](https://www.elastic.co/guide/en/elasticsearch/reference/current/search-multi-search.html) +// API. However, similar to the fleet search API, it +// supports the wait_for_checkpoints parameter. 
func New(tp elastictransport.Interface) *Msearch { r := &Msearch{ transport: tp, @@ -467,3 +475,47 @@ func (r *Msearch) AllowPartialSearchResults(allowpartialsearchresults bool) *Mse return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Msearch) ErrorTrace(errortrace bool) *Msearch { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Msearch) FilterPath(filterpaths ...string) *Msearch { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Msearch) Human(human bool) *Msearch { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Msearch) Pretty(pretty bool) *Msearch { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/fleet/msearch/request.go b/typedapi/fleet/msearch/request.go index 32d897b85d..db09d0a5cf 100644 --- a/typedapi/fleet/msearch/request.go +++ b/typedapi/fleet/msearch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package msearch @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package msearch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/fleet/msearch/MultiSearchRequest.ts#L32-L115 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/fleet/msearch/MultiSearchRequest.ts#L32-L115 type Request = []types.MsearchRequestItem diff --git a/typedapi/fleet/msearch/response.go b/typedapi/fleet/msearch/response.go index 7d0cf6c001..d8dba07aaf 100644 --- a/typedapi/fleet/msearch/response.go +++ b/typedapi/fleet/msearch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package msearch @@ -32,7 +32,7 @@ import ( // Response holds the response body struct for the package msearch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/fleet/msearch/MultiSearchResponse.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/fleet/msearch/MultiSearchResponse.ts#L25-L29 type Response struct { Docs []types.MsearchResponseItem `json:"docs"` } diff --git a/typedapi/fleet/postsecret/post_secret.go b/typedapi/fleet/postsecret/post_secret.go index 9f557e3df8..d30b966f66 100644 --- a/typedapi/fleet/postsecret/post_secret.go +++ b/typedapi/fleet/postsecret/post_secret.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates a secret stored by Fleet. package postsecret @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strings" @@ -195,7 +194,7 @@ func (r PostSecret) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err diff --git a/typedapi/fleet/search/request.go b/typedapi/fleet/search/request.go index 1081ce088a..4a957fb2c5 100644 --- a/typedapi/fleet/search/request.go +++ b/typedapi/fleet/search/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package search @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/fleet/search/SearchRequest.ts#L55-L260 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/fleet/search/SearchRequest.ts#L55-L260 type Request struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Collapse *types.FieldCollapse `json:"collapse,omitempty"` @@ -129,6 +129,7 @@ func NewRequest() *Request { Ext: make(map[string]json.RawMessage, 0), ScriptFields: make(map[string]types.ScriptField, 0), } + return r } @@ -177,7 +178,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "explain": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -205,7 +206,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "from": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -230,7 +231,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "min_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -256,7 +257,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "profile": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -309,7 +310,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "seq_no_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -324,7 +325,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -391,7 +392,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "terminate_after": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -418,7 +419,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Timeout = &o case "track_scores": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -437,7 +438,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "version": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/fleet/search/response.go b/typedapi/fleet/search/response.go index fcf2cfd2bb..23f714a674 100644 --- a/typedapi/fleet/search/response.go +++ b/typedapi/fleet/search/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package search @@ -34,7 +34,7 @@ import ( // Response holds the response body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/fleet/search/SearchResponse.ts#L33-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/fleet/search/SearchResponse.ts#L33-L50 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` Clusters_ *types.ClusterStatistics `json:"_clusters,omitempty"` @@ -532,7 +532,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := types.NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -582,7 +582,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -592,7 +592,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -620,7 +620,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "max_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -636,7 +636,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "num_reduce_phases": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -714,7 +714,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -724,7 +724,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Suggest") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -734,7 +734,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "terminated_early": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -748,7 +748,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "timed_out": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -762,7 +762,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/fleet/search/search.go b/typedapi/fleet/search/search.go index 599c488c6a..e66fbf330f 100644 --- a/typedapi/fleet/search/search.go +++ b/typedapi/fleet/search/search.go @@ -16,11 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Search API where the search will only be executed after specified checkpoints -// are available due to a refresh. This API is designed for internal use by the -// fleet server project. +// The purpose of the fleet search api is to provide a search api where the +// search will only be executed +// after provided checkpoint has been processed and is visible for searches +// inside of Elasticsearch. package search import ( @@ -87,9 +88,10 @@ func NewSearchFunc(tp elastictransport.Interface) NewSearch { } } -// Search API where the search will only be executed after specified checkpoints -// are available due to a refresh. This API is designed for internal use by the -// fleet server project. +// The purpose of the fleet search api is to provide a search api where the +// search will only be executed +// after provided checkpoint has been processed and is visible for searches +// inside of Elasticsearch. func New(tp elastictransport.Interface) *Search { r := &Search{ transport: tp, @@ -556,6 +558,50 @@ func (r *Search) AllowPartialSearchResults(allowpartialsearchresults bool) *Sear return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Search) ErrorTrace(errortrace bool) *Search { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Search) FilterPath(filterpaths ...string) *Search { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Search) Human(human bool) *Search { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Search) Pretty(pretty bool) *Search { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: aggregations func (r *Search) Aggregations(aggregations map[string]types.Aggregations) *Search { diff --git a/typedapi/graph/explore/explore.go b/typedapi/graph/explore/explore.go index a960f8e5c4..81e477472a 100644 --- a/typedapi/graph/explore/explore.go +++ b/typedapi/graph/explore/explore.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Explore extracted and summarized information about the documents and terms in -// an index. +// Extracts and summarizes information about the documents and terms in an +// Elasticsearch data stream or index. 
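The reworded summary above describes what the graph explore API does. A loose usage sketch, assuming the generated constructor takes the target index and the builder exposes the usual Do method (neither appears in this hunk), with imports as in the getfeatures sketch earlier but pointing at typedapi/graph/explore and fmt:

// exploreIndex runs a graph explore request against a hypothetical "clicks"
// index with a server-side timeout, keeping only the connections in the reply.
func exploreIndex(tp elastictransport.Interface) error {
	res, err := explore.NewExploreFunc(tp)("clicks").
		Timeout("10s").
		FilterPath("connections").
		Do(context.Background())
	if err != nil {
		return err
	}
	fmt.Printf("explore returned %d connections\n", len(res.Connections))
	return nil
}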
package explore import ( @@ -31,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -81,8 +82,8 @@ func NewExploreFunc(tp elastictransport.Interface) NewExplore { } } -// Explore extracted and summarized information about the documents and terms in -// an index. +// Extracts and summarizes information about the documents and terms in an +// Elasticsearch data stream or index. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/graph-explore-api.html func New(tp elastictransport.Interface) *Explore { @@ -335,6 +336,50 @@ func (r *Explore) Timeout(duration string) *Explore { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Explore) ErrorTrace(errortrace bool) *Explore { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Explore) FilterPath(filterpaths ...string) *Explore { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Explore) Human(human bool) *Explore { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Explore) Pretty(pretty bool) *Explore { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Connections Specifies or more fields from which you want to extract terms that are // associated with the specified vertices. // API name: connections diff --git a/typedapi/graph/explore/request.go b/typedapi/graph/explore/request.go index d84bf1e590..5d950c7657 100644 --- a/typedapi/graph/explore/request.go +++ b/typedapi/graph/explore/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package explore @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package explore // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/graph/explore/GraphExploreRequest.ts#L28-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/graph/explore/GraphExploreRequest.ts#L28-L72 type Request struct { // Connections Specifies or more fields from which you want to extract terms that are @@ -48,6 +48,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/graph/explore/response.go b/typedapi/graph/explore/response.go index ef850719de..455fda1c13 100644 --- a/typedapi/graph/explore/response.go +++ b/typedapi/graph/explore/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package explore @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package explore // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/graph/explore/GraphExploreResponse.ts#L25-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/graph/explore/GraphExploreResponse.ts#L25-L33 type Response struct { Connections []types.Connection `json:"connections"` Failures []types.ShardFailure `json:"failures"` diff --git a/typedapi/ilm/deletelifecycle/delete_lifecycle.go b/typedapi/ilm/deletelifecycle/delete_lifecycle.go index 3ae00fcc97..60298c3987 100644 --- a/typedapi/ilm/deletelifecycle/delete_lifecycle.go +++ b/typedapi/ilm/deletelifecycle/delete_lifecycle.go @@ -16,10 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes the specified lifecycle policy definition. A currently used policy -// cannot be deleted. +// Deletes the specified lifecycle policy definition. You cannot delete policies +// that are currently in use. If the policy is being used to manage any indices, +// the request fails and returns an error. package deletelifecycle import ( @@ -28,9 +29,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,8 +78,9 @@ func NewDeleteLifecycleFunc(tp elastictransport.Interface) NewDeleteLifecycle { } } -// Deletes the specified lifecycle policy definition. A currently used policy -// cannot be deleted. +// Deletes the specified lifecycle policy definition. You cannot delete policies +// that are currently in use. If the policy is being used to manage any indices, +// the request fails and returns an error. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-delete-lifecycle.html func New(tp elastictransport.Interface) *DeleteLifecycle { @@ -264,7 +266,7 @@ func (r DeleteLifecycle) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -318,3 +320,47 @@ func (r *DeleteLifecycle) Timeout(duration string) *DeleteLifecycle { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteLifecycle) ErrorTrace(errortrace bool) *DeleteLifecycle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteLifecycle) FilterPath(filterpaths ...string) *DeleteLifecycle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteLifecycle) Human(human bool) *DeleteLifecycle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteLifecycle) Pretty(pretty bool) *DeleteLifecycle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ilm/deletelifecycle/response.go b/typedapi/ilm/deletelifecycle/response.go index 9aa7f07023..2f82eee671 100644 --- a/typedapi/ilm/deletelifecycle/response.go +++ b/typedapi/ilm/deletelifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletelifecycle // Response holds the response body struct for the package deletelifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/delete_lifecycle/DeleteLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/delete_lifecycle/DeleteLifecycleResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ilm/explainlifecycle/explain_lifecycle.go b/typedapi/ilm/explainlifecycle/explain_lifecycle.go index cd06f2744a..c226d64b9f 100644 --- a/typedapi/ilm/explainlifecycle/explain_lifecycle.go +++ b/typedapi/ilm/explainlifecycle/explain_lifecycle.go @@ -16,10 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
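The updated wording above is explicit that an in-use policy cannot be deleted; the request fails with an error instead. A small sketch of surfacing that, assuming the generated constructor takes the policy name and the builder has the usual Do method returning the Response type shown just above (both are conventions, not part of this hunk), with imports mirroring the getfeatures sketch but using typedapi/ilm/deletelifecycle:

// deleteUnusedPolicy removes an ILM policy and reports failures, including
// the "policy is still managing indices" case described above.
func deleteUnusedPolicy(tp elastictransport.Interface, policy string) error {
	res, err := deletelifecycle.NewDeleteLifecycleFunc(tp)(policy).
		Do(context.Background())
	if err != nil {
		return fmt.Errorf("could not delete ILM policy %q: %w", policy, err)
	}
	if !res.Acknowledged {
		return fmt.Errorf("delete of ILM policy %q was not acknowledged", policy)
	}
	return nil
}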
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Retrieves information about the index's current lifecycle state, such as the -// currently executing phase, action, and step. +// Retrieves information about the index’s current lifecycle state, such as the +// currently executing phase, action, and step. Shows when the index entered +// each one, the definition of the running phase, and information about any +// failures. package explainlifecycle import ( @@ -28,7 +30,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -78,8 +79,10 @@ func NewExplainLifecycleFunc(tp elastictransport.Interface) NewExplainLifecycle } } -// Retrieves information about the index's current lifecycle state, such as the -// currently executing phase, action, and step. +// Retrieves information about the index’s current lifecycle state, such as the +// currently executing phase, action, and step. Shows when the index entered +// each one, the definition of the running phase, and information about any +// failures. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-explain-lifecycle.html func New(tp elastictransport.Interface) *ExplainLifecycle { @@ -265,7 +268,7 @@ func (r ExplainLifecycle) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -339,3 +342,47 @@ func (r *ExplainLifecycle) Timeout(duration string) *ExplainLifecycle { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ExplainLifecycle) ErrorTrace(errortrace bool) *ExplainLifecycle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ExplainLifecycle) FilterPath(filterpaths ...string) *ExplainLifecycle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ExplainLifecycle) Human(human bool) *ExplainLifecycle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ExplainLifecycle) Pretty(pretty bool) *ExplainLifecycle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ilm/explainlifecycle/response.go b/typedapi/ilm/explainlifecycle/response.go index 6412a1a0ac..cb9a544c8a 100644 --- a/typedapi/ilm/explainlifecycle/response.go +++ b/typedapi/ilm/explainlifecycle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package explainlifecycle @@ -31,7 +31,7 @@ import ( // Response holds the response body struct for the package explainlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/explain_lifecycle/ExplainLifecycleResponse.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/explain_lifecycle/ExplainLifecycleResponse.ts#L24-L28 type Response struct { Indices map[string]types.LifecycleExplain `json:"indices"` } @@ -65,7 +65,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) diff --git a/typedapi/ilm/getlifecycle/get_lifecycle.go b/typedapi/ilm/getlifecycle/get_lifecycle.go index 08656d2f11..660bf13d8d 100644 --- a/typedapi/ilm/getlifecycle/get_lifecycle.go +++ b/typedapi/ilm/getlifecycle/get_lifecycle.go @@ -16,10 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns the specified policy definition. Includes the policy version and last -// modified date. +// Retrieves a lifecycle policy. package getlifecycle import ( @@ -28,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -75,8 +74,7 @@ func NewGetLifecycleFunc(tp elastictransport.Interface) NewGetLifecycle { } } -// Returns the specified policy definition. Includes the policy version and last -// modified date. +// Retrieves a lifecycle policy. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-get-lifecycle.html func New(tp elastictransport.Interface) *GetLifecycle { @@ -269,7 +267,7 @@ func (r GetLifecycle) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -323,3 +321,47 @@ func (r *GetLifecycle) Timeout(duration string) *GetLifecycle { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetLifecycle) ErrorTrace(errortrace bool) *GetLifecycle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetLifecycle) FilterPath(filterpaths ...string) *GetLifecycle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetLifecycle) Human(human bool) *GetLifecycle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetLifecycle) Pretty(pretty bool) *GetLifecycle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ilm/getlifecycle/response.go b/typedapi/ilm/getlifecycle/response.go index d4a68cef3f..22d4588a28 100644 --- a/typedapi/ilm/getlifecycle/response.go +++ b/typedapi/ilm/getlifecycle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getlifecycle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/get_lifecycle/GetLifecycleResponse.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/get_lifecycle/GetLifecycleResponse.ts#L23-L26 type Response map[string]types.Lifecycle diff --git a/typedapi/ilm/getstatus/get_status.go b/typedapi/ilm/getstatus/get_status.go index d7f8a11efc..e6cf89e286 100644 --- a/typedapi/ilm/getstatus/get_status.go +++ b/typedapi/ilm/getstatus/get_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves the current index lifecycle management (ILM) status. package getstatus @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r GetStatus) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *GetStatus) Header(key, value string) *GetStatus { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetStatus) ErrorTrace(errortrace bool) *GetStatus { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *GetStatus) FilterPath(filterpaths ...string) *GetStatus { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetStatus) Human(human bool) *GetStatus { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetStatus) Pretty(pretty bool) *GetStatus { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ilm/getstatus/response.go b/typedapi/ilm/getstatus/response.go index dd5342d9f3..2a29a13b97 100644 --- a/typedapi/ilm/getstatus/response.go +++ b/typedapi/ilm/getstatus/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getstatus @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getstatus // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/get_status/GetIlmStatusResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/get_status/GetIlmStatusResponse.ts#L22-L24 type Response struct { OperationMode lifecycleoperationmode.LifecycleOperationMode `json:"operation_mode"` } diff --git a/typedapi/ilm/migratetodatatiers/migrate_to_data_tiers.go b/typedapi/ilm/migratetodatatiers/migrate_to_data_tiers.go index 3f5d3b9d9e..4cbcd8c237 100644 --- a/typedapi/ilm/migratetodatatiers/migrate_to_data_tiers.go +++ b/typedapi/ilm/migratetodatatiers/migrate_to_data_tiers.go @@ -16,10 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Migrates the indices and ILM policies away from custom node attribute -// allocation routing to data tiers routing +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Switches the indices, ILM policies, and legacy, composable and component +// templates from using custom node attributes and +// attribute-based allocation filters to using data tiers, and optionally +// deletes one legacy index template.+ +// Using node roles enables ILM to automatically move the indices between data +// tiers. 
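Since this part of the diff only shows the generated builder, a short usage sketch may help; it is illustrative and not part of the diff. It leans on the `New(tp elastictransport.Interface)` constructor and the `DryRun`, `Pretty`, and response fields that appear in the migratetodatatiers hunks below; the cluster URL, the `elastictransport.New` setup, and the `Do` call are assumptions based on the usual generated-client pattern.

// Illustrative sketch (not part of this diff): dry-run the ILM
// migrate-to-data-tiers API via the generated typedapi package.
package main

import (
	"context"
	"fmt"
	"log"
	"net/url"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ilm/migratetodatatiers"
)

func main() {
	u, _ := url.Parse("http://localhost:9200") // assumed local cluster
	tp, err := elastictransport.New(elastictransport.Config{URLs: []*url.URL{u}})
	if err != nil {
		log.Fatal(err)
	}

	// DryRun simulates the migration; Pretty is one of the query-string
	// helpers this diff adds to every typed request builder.
	res, err := migratetodatatiers.New(tp).
		DryRun(true).
		Pretty(true).
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("dry run:", res.DryRun)
	fmt.Println("component templates that would migrate:", res.MigratedComponentTemplates)
}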
package migratetodatatiers import ( @@ -74,8 +78,12 @@ func NewMigrateToDataTiersFunc(tp elastictransport.Interface) NewMigrateToDataTi } } -// Migrates the indices and ILM policies away from custom node attribute -// allocation routing to data tiers routing +// Switches the indices, ILM policies, and legacy, composable and component +// templates from using custom node attributes and +// attribute-based allocation filters to using data tiers, and optionally +// deletes one legacy index template.+ +// Using node roles enables ILM to automatically move the indices between data +// tiers. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-migrate-to-data-tiers.html func New(tp elastictransport.Interface) *MigrateToDataTiers { @@ -305,6 +313,50 @@ func (r *MigrateToDataTiers) DryRun(dryrun bool) *MigrateToDataTiers { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *MigrateToDataTiers) ErrorTrace(errortrace bool) *MigrateToDataTiers { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *MigrateToDataTiers) FilterPath(filterpaths ...string) *MigrateToDataTiers { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *MigrateToDataTiers) Human(human bool) *MigrateToDataTiers { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *MigrateToDataTiers) Pretty(pretty bool) *MigrateToDataTiers { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: legacy_template_to_delete func (r *MigrateToDataTiers) LegacyTemplateToDelete(legacytemplatetodelete string) *MigrateToDataTiers { diff --git a/typedapi/ilm/migratetodatatiers/request.go b/typedapi/ilm/migratetodatatiers/request.go index 1bf46fe54d..73ead330d3 100644 --- a/typedapi/ilm/migratetodatatiers/request.go +++ b/typedapi/ilm/migratetodatatiers/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package migratetodatatiers @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package migratetodatatiers // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/migrate_to_data_tiers/Request.ts#L22-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/migrate_to_data_tiers/Request.ts#L22-L43 type Request struct { LegacyTemplateToDelete *string `json:"legacy_template_to_delete,omitempty"` NodeAttribute *string `json:"node_attribute,omitempty"` @@ -36,6 +36,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ilm/migratetodatatiers/response.go b/typedapi/ilm/migratetodatatiers/response.go index 7a1f707c40..db155e2d49 100644 --- a/typedapi/ilm/migratetodatatiers/response.go +++ b/typedapi/ilm/migratetodatatiers/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package migratetodatatiers @@ -31,7 +31,7 @@ import ( // Response holds the response body struct for the package migratetodatatiers // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/migrate_to_data_tiers/Response.ts#L22-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/migrate_to_data_tiers/Response.ts#L22-L32 type Response struct { DryRun bool `json:"dry_run"` MigratedComponentTemplates []string `json:"migrated_component_templates"` @@ -63,7 +63,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { switch t { case "dry_run": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ilm/movetostep/move_to_step.go b/typedapi/ilm/movetostep/move_to_step.go index 4ec067bd9c..2a1cb53778 100644 --- a/typedapi/ilm/movetostep/move_to_step.go +++ b/typedapi/ilm/movetostep/move_to_step.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Manually moves an index into the specified step and executes that step. package movetostep @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -314,6 +315,50 @@ func (r *MoveToStep) _index(index string) *MoveToStep { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *MoveToStep) ErrorTrace(errortrace bool) *MoveToStep { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *MoveToStep) FilterPath(filterpaths ...string) *MoveToStep { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *MoveToStep) Human(human bool) *MoveToStep { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *MoveToStep) Pretty(pretty bool) *MoveToStep { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: current_step func (r *MoveToStep) CurrentStep(currentstep *types.StepKey) *MoveToStep { diff --git a/typedapi/ilm/movetostep/request.go b/typedapi/ilm/movetostep/request.go index 604789f8b1..44626bdcaa 100644 --- a/typedapi/ilm/movetostep/request.go +++ b/typedapi/ilm/movetostep/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package movetostep @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package movetostep // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/move_to_step/MoveToStepRequest.ts#L24-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/move_to_step/MoveToStepRequest.ts#L24-L36 type Request struct { CurrentStep *types.StepKey `json:"current_step,omitempty"` NextStep *types.StepKey `json:"next_step,omitempty"` @@ -38,6 +38,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ilm/movetostep/response.go b/typedapi/ilm/movetostep/response.go index ecc2f48287..dfc60b7ff8 100644 --- a/typedapi/ilm/movetostep/response.go +++ b/typedapi/ilm/movetostep/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package movetostep // Response holds the response body struct for the package movetostep // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/move_to_step/MoveToStepResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/move_to_step/MoveToStepResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ilm/putlifecycle/put_lifecycle.go b/typedapi/ilm/putlifecycle/put_lifecycle.go index f7a4e13a12..18e3745737 100644 --- a/typedapi/ilm/putlifecycle/put_lifecycle.go +++ b/typedapi/ilm/putlifecycle/put_lifecycle.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates a lifecycle policy +// Creates a lifecycle policy. If the specified policy exists, the policy is +// replaced and the policy version is incremented. package putlifecycle import ( @@ -30,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +82,8 @@ func NewPutLifecycleFunc(tp elastictransport.Interface) NewPutLifecycle { } } -// Creates a lifecycle policy +// Creates a lifecycle policy. If the specified policy exists, the policy is +// replaced and the policy version is incremented. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ilm-put-lifecycle.html func New(tp elastictransport.Interface) *PutLifecycle { @@ -331,3 +334,47 @@ func (r *PutLifecycle) Timeout(duration string) *PutLifecycle { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutLifecycle) ErrorTrace(errortrace bool) *PutLifecycle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutLifecycle) FilterPath(filterpaths ...string) *PutLifecycle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutLifecycle) Human(human bool) *PutLifecycle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *PutLifecycle) Pretty(pretty bool) *PutLifecycle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ilm/putlifecycle/request.go b/typedapi/ilm/putlifecycle/request.go index cdf0542f62..11c6eedb8b 100644 --- a/typedapi/ilm/putlifecycle/request.go +++ b/typedapi/ilm/putlifecycle/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putlifecycle @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/put_lifecycle/PutLifecycleRequest.ts#L25-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/put_lifecycle/PutLifecycleRequest.ts#L25-L55 type Request struct { Policy *types.IlmPolicy `json:"policy,omitempty"` } @@ -37,6 +37,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ilm/putlifecycle/response.go b/typedapi/ilm/putlifecycle/response.go index 7ff9083cf2..39a4de4892 100644 --- a/typedapi/ilm/putlifecycle/response.go +++ b/typedapi/ilm/putlifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putlifecycle // Response holds the response body struct for the package putlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/put_lifecycle/PutLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/put_lifecycle/PutLifecycleResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ilm/removepolicy/remove_policy.go b/typedapi/ilm/removepolicy/remove_policy.go index 0afa7fbf82..cedd6f837b 100644 --- a/typedapi/ilm/removepolicy/remove_policy.go +++ b/typedapi/ilm/removepolicy/remove_policy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Removes the assigned lifecycle policy and stops managing the specified index package removepolicy @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -262,7 +262,7 @@ func (r RemovePolicy) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +298,47 @@ func (r *RemovePolicy) _index(index string) *RemovePolicy { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *RemovePolicy) ErrorTrace(errortrace bool) *RemovePolicy { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *RemovePolicy) FilterPath(filterpaths ...string) *RemovePolicy { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *RemovePolicy) Human(human bool) *RemovePolicy { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *RemovePolicy) Pretty(pretty bool) *RemovePolicy { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ilm/removepolicy/response.go b/typedapi/ilm/removepolicy/response.go index ad4baa5812..3d60b529cb 100644 --- a/typedapi/ilm/removepolicy/response.go +++ b/typedapi/ilm/removepolicy/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package removepolicy // Response holds the response body struct for the package removepolicy // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/remove_policy/RemovePolicyResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/remove_policy/RemovePolicyResponse.ts#L22-L27 type Response struct { FailedIndexes []string `json:"failed_indexes"` HasFailures bool `json:"has_failures"` diff --git a/typedapi/ilm/retry/response.go b/typedapi/ilm/retry/response.go index 1ac8853ee8..1d24db1bda 100644 --- a/typedapi/ilm/retry/response.go +++ b/typedapi/ilm/retry/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package retry // Response holds the response body struct for the package retry // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/retry/RetryIlmResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/retry/RetryIlmResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ilm/retry/retry.go b/typedapi/ilm/retry/retry.go index 5a98790828..13de1f7dc0 100644 --- a/typedapi/ilm/retry/retry.go +++ b/typedapi/ilm/retry/retry.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retries executing the policy for an index that is in the ERROR step. package retry @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -262,7 +262,7 @@ func (r Retry) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -299,3 +299,47 @@ func (r *Retry) _index(index string) *Retry { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Retry) ErrorTrace(errortrace bool) *Retry { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Retry) FilterPath(filterpaths ...string) *Retry { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Retry) Human(human bool) *Retry { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Retry) Pretty(pretty bool) *Retry { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ilm/start/response.go b/typedapi/ilm/start/response.go index c057fc9c4d..132e7c835d 100644 --- a/typedapi/ilm/start/response.go +++ b/typedapi/ilm/start/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package start // Response holds the response body struct for the package start // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/start/StartIlmResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/start/StartIlmResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ilm/start/start.go b/typedapi/ilm/start/start.go index ef0baa79a2..a85eb2a8c4 100644 --- a/typedapi/ilm/start/start.go +++ b/typedapi/ilm/start/start.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Start the index lifecycle management (ILM) plugin. package start @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r Start) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -289,3 +289,47 @@ func (r *Start) Timeout(duration string) *Start { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Start) ErrorTrace(errortrace bool) *Start { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Start) FilterPath(filterpaths ...string) *Start { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Start) Human(human bool) *Start { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Start) Pretty(pretty bool) *Start { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ilm/stop/response.go b/typedapi/ilm/stop/response.go index 8401b523ad..d027e1eb7e 100644 --- a/typedapi/ilm/stop/response.go +++ b/typedapi/ilm/stop/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stop // Response holds the response body struct for the package stop // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/stop/StopIlmResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/stop/StopIlmResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ilm/stop/stop.go b/typedapi/ilm/stop/stop.go index f143834fee..b15a7ffc33 100644 --- a/typedapi/ilm/stop/stop.go +++ b/typedapi/ilm/stop/stop.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Halts all lifecycle management operations and stops the index lifecycle // management (ILM) plugin @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -250,7 +250,7 @@ func (r Stop) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -291,3 +291,47 @@ func (r *Stop) Timeout(duration string) *Stop { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Stop) ErrorTrace(errortrace bool) *Stop { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Stop) FilterPath(filterpaths ...string) *Stop { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Stop) Human(human bool) *Stop { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Stop) Pretty(pretty bool) *Stop { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/addblock/add_block.go b/typedapi/indices/addblock/add_block.go index 642ea01d1c..0de04a0c25 100644 --- a/typedapi/indices/addblock/add_block.go +++ b/typedapi/indices/addblock/add_block.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Adds a block to an index. package addblock @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -273,7 +272,7 @@ func (r AddBlock) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -365,3 +364,47 @@ func (r *AddBlock) Timeout(duration string) *AddBlock { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *AddBlock) ErrorTrace(errortrace bool) *AddBlock { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *AddBlock) FilterPath(filterpaths ...string) *AddBlock { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *AddBlock) Human(human bool) *AddBlock { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *AddBlock) Pretty(pretty bool) *AddBlock { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/addblock/response.go b/typedapi/indices/addblock/response.go index 7f1ce84049..be775231ef 100644 --- a/typedapi/indices/addblock/response.go +++ b/typedapi/indices/addblock/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package addblock @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package addblock // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/add_block/IndicesAddBlockResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/add_block/IndicesAddBlockResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` Indices []types.IndicesBlockStatus `json:"indices"` diff --git a/typedapi/indices/analyze/analyze.go b/typedapi/indices/analyze/analyze.go index 3eb44e9d9a..1ace57d44b 100644 --- a/typedapi/indices/analyze/analyze.go +++ b/typedapi/indices/analyze/analyze.go @@ -16,10 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Performs the analysis process on a text and return the tokens breakdown of -// the text. +// Performs analysis on a text string and returns the resulting tokens. package analyze import ( @@ -31,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -79,8 +79,7 @@ func NewAnalyzeFunc(tp elastictransport.Interface) NewAnalyze { } } -// Performs the analysis process on a text and return the tokens breakdown of -// the text. +// Performs analysis on a text string and returns the resulting tokens. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-analyze.html func New(tp elastictransport.Interface) *Analyze { @@ -320,6 +319,50 @@ func (r *Analyze) Index(index string) *Analyze { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Analyze) ErrorTrace(errortrace bool) *Analyze { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Analyze) FilterPath(filterpaths ...string) *Analyze { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. 
This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Analyze) Human(human bool) *Analyze { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Analyze) Pretty(pretty bool) *Analyze { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Analyzer The name of the analyzer that should be applied to the provided `text`. // This could be a built-in analyzer, or an analyzer that’s been configured in // the index. diff --git a/typedapi/indices/analyze/request.go b/typedapi/indices/analyze/request.go index e4df504a59..aca69a26ab 100644 --- a/typedapi/indices/analyze/request.go +++ b/typedapi/indices/analyze/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package analyze @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package analyze // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/analyze/IndicesAnalyzeRequest.ts#L27-L92 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/analyze/IndicesAnalyzeRequest.ts#L27-L92 type Request struct { // Analyzer The name of the analyzer that should be applied to the provided `text`. @@ -66,6 +66,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -160,7 +161,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } s.CharFilter = append(s.CharFilter, *o) default: - o := new(interface{}) + o := new(any) if err := localDec.Decode(&o); err != nil { return err } @@ -168,7 +169,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } default: source := bytes.NewReader(rawMsg) - o := new(interface{}) + o := new(any) if err := json.NewDecoder(source).Decode(&o); err != nil { return fmt.Errorf("%s | %w", "CharFilter", err) } @@ -177,7 +178,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "explain": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -495,7 +496,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } s.Filter = append(s.Filter, *o) default: - o := new(interface{}) + o := new(any) if err := localDec.Decode(&o); err != nil { return err } @@ -503,7 +504,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } default: source := bytes.NewReader(rawMsg) - o := new(interface{}) + o := new(any) if err := json.NewDecoder(source).Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Filter", err) } diff --git a/typedapi/indices/analyze/response.go b/typedapi/indices/analyze/response.go index ac20646f8a..87153a8402 100644 --- a/typedapi/indices/analyze/response.go +++ b/typedapi/indices/analyze/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package analyze @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package analyze // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/analyze/IndicesAnalyzeResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/analyze/IndicesAnalyzeResponse.ts#L22-L27 type Response struct { Detail *types.AnalyzeDetail `json:"detail,omitempty"` Tokens []types.AnalyzeToken `json:"tokens,omitempty"` diff --git a/typedapi/indices/clearcache/clear_cache.go b/typedapi/indices/clearcache/clear_cache.go index 8608d8f378..a5db872c29 100644 --- a/typedapi/indices/clearcache/clear_cache.go +++ b/typedapi/indices/clearcache/clear_cache.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Clears all or specific caches for one or more indices. +// Clears the caches of one or more indices. +// For data streams, the API clears the caches of the stream’s backing indices. package clearcache import ( @@ -27,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,7 +76,8 @@ func NewClearCacheFunc(tp elastictransport.Interface) NewClearCache { } } -// Clears all or specific caches for one or more indices. +// Clears the caches of one or more indices. +// For data streams, the API clears the caches of the stream’s backing indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-clearcache.html func New(tp elastictransport.Interface) *ClearCache { @@ -269,7 +270,7 @@ func (r ClearCache) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -377,3 +378,47 @@ func (r *ClearCache) Request(request bool) *ClearCache { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ClearCache) ErrorTrace(errortrace bool) *ClearCache { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ClearCache) FilterPath(filterpaths ...string) *ClearCache { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *ClearCache) Human(human bool) *ClearCache { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ClearCache) Pretty(pretty bool) *ClearCache { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/clearcache/response.go b/typedapi/indices/clearcache/response.go index 2b0a8bbf4c..afc84dc5e6 100644 --- a/typedapi/indices/clearcache/response.go +++ b/typedapi/indices/clearcache/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clearcache @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcache // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/clear_cache/IndicesClearCacheResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/clear_cache/IndicesClearCacheResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` } diff --git a/typedapi/indices/clone/clone.go b/typedapi/indices/clone/clone.go index 249b348e6b..365cd1b857 100644 --- a/typedapi/indices/clone/clone.go +++ b/typedapi/indices/clone/clone.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Clones an index +// Clones an existing index. package clone import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -85,7 +86,7 @@ func NewCloneFunc(tp elastictransport.Interface) NewClone { } } -// Clones an index +// Clones an existing index. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-clone-index.html func New(tp elastictransport.Interface) *Clone { @@ -363,6 +364,50 @@ func (r *Clone) WaitForActiveShards(waitforactiveshards string) *Clone { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Clone) ErrorTrace(errortrace bool) *Clone { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Clone) FilterPath(filterpaths ...string) *Clone { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. 
This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Clone) Human(human bool) *Clone { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Clone) Pretty(pretty bool) *Clone { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Aliases Aliases for the resulting index. // API name: aliases func (r *Clone) Aliases(aliases map[string]types.Alias) *Clone { diff --git a/typedapi/indices/clone/request.go b/typedapi/indices/clone/request.go index e937b3ec89..2e4d7ca73a 100644 --- a/typedapi/indices/clone/request.go +++ b/typedapi/indices/clone/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clone @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package clone // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/clone/IndicesCloneRequest.ts#L27-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/clone/IndicesCloneRequest.ts#L27-L75 type Request struct { // Aliases Aliases for the resulting index. @@ -44,6 +44,7 @@ func NewRequest() *Request { Aliases: make(map[string]types.Alias, 0), Settings: make(map[string]json.RawMessage, 0), } + return r } diff --git a/typedapi/indices/clone/response.go b/typedapi/indices/clone/response.go index 51eb82300a..46a436b5c4 100644 --- a/typedapi/indices/clone/response.go +++ b/typedapi/indices/clone/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clone // Response holds the response body struct for the package clone // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/clone/IndicesCloneResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/clone/IndicesCloneResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` Index string `json:"index"` diff --git a/typedapi/indices/close/close.go b/typedapi/indices/close/close.go index 8b08f4faac..ec7f78eb04 100644 --- a/typedapi/indices/close/close.go +++ b/typedapi/indices/close/close.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Closes an index. 
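The close builder in the hunks that follow gains the same query-string helpers as the rest of the diff. A small, hypothetical sketch of calling it: `NewCloseFunc` is not visible in this excerpt and is assumed to follow the same `func(index string) *Close` shape as the `NewCloneFunc`/`NewCreateFunc` constructors shown nearby; the URL and index name are placeholders.

// Illustrative sketch (not part of this diff): close an index and trim the
// response with the new filter_path helper.
package main

import (
	"context"
	"fmt"
	"log"
	"net/url"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	indicesclose "github.com/elastic/go-elasticsearch/v8/typedapi/indices/close"
)

func main() {
	u, _ := url.Parse("http://localhost:9200") // assumed local cluster
	tp, err := elastictransport.New(elastictransport.Config{URLs: []*url.URL{u}})
	if err != nil {
		log.Fatal(err)
	}

	// FilterPath and Pretty are query-string helpers added by this diff;
	// IsSuccess is the helper whose io.Discard change appears below.
	ok, err := indicesclose.NewCloseFunc(tp)("my-index").
		FilterPath("acknowledged").
		Pretty(true).
		IsSuccess(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("index closed:", ok)
}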
package close @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -262,7 +261,7 @@ func (r Close) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -365,3 +364,47 @@ func (r *Close) WaitForActiveShards(waitforactiveshards string) *Close { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Close) ErrorTrace(errortrace bool) *Close { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Close) FilterPath(filterpaths ...string) *Close { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Close) Human(human bool) *Close { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Close) Pretty(pretty bool) *Close { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/close/response.go b/typedapi/indices/close/response.go index 13132afde2..044fdc3fa0 100644 --- a/typedapi/indices/close/response.go +++ b/typedapi/indices/close/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package close @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package close // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/close/CloseIndexResponse.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/close/CloseIndexResponse.ts#L24-L30 type Response struct { Acknowledged bool `json:"acknowledged"` Indices map[string]types.CloseIndexResult `json:"indices"` diff --git a/typedapi/indices/create/create.go b/typedapi/indices/create/create.go index e0e142066b..17f8bfa778 100644 --- a/typedapi/indices/create/create.go +++ b/typedapi/indices/create/create.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates an index with optional settings and mappings. +// Creates a new index. 
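The create builder below now simply documents "Creates a new index". A hedged sketch of using it, relying on the `NewCreateFunc`, `Aliases`, and `WaitForActiveShards` setters and the `Acknowledged`/`Index` response fields that appear in the following hunks; the `Do` call, cluster URL, index name, and alias name are assumptions for illustration.

// Illustrative sketch (not part of this diff): create an index with an
// alias via the generated typedapi builder.
package main

import (
	"context"
	"fmt"
	"log"
	"net/url"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/indices/create"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	u, _ := url.Parse("http://localhost:9200") // assumed local cluster
	tp, err := elastictransport.New(elastictransport.Config{URLs: []*url.URL{u}})
	if err != nil {
		log.Fatal(err)
	}

	// Aliases and WaitForActiveShards are setters visible in the hunks
	// below; Human is one of the query-string helpers this diff adds.
	res, err := create.NewCreateFunc(tp)("my-index").
		Aliases(map[string]types.Alias{"my-alias": {}}).
		WaitForActiveShards("1").
		Human(true).
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("acknowledged:", res.Acknowledged, "index:", res.Index)
}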
package create import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +81,7 @@ func NewCreateFunc(tp elastictransport.Interface) NewCreate { } } -// Creates an index with optional settings and mappings. +// Creates a new index. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-create-index.html func New(tp elastictransport.Interface) *Create { @@ -341,6 +342,50 @@ func (r *Create) WaitForActiveShards(waitforactiveshards string) *Create { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Create) ErrorTrace(errortrace bool) *Create { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Create) FilterPath(filterpaths ...string) *Create { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Create) Human(human bool) *Create { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Create) Pretty(pretty bool) *Create { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Aliases Aliases for the index. // API name: aliases func (r *Create) Aliases(aliases map[string]types.Alias) *Create { diff --git a/typedapi/indices/create/request.go b/typedapi/indices/create/request.go index 4cde470d95..801d0dfcc8 100644 --- a/typedapi/indices/create/request.go +++ b/typedapi/indices/create/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package create @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/create/IndicesCreateRequest.ts#L28-L81 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/create/IndicesCreateRequest.ts#L28-L81 type Request struct { // Aliases Aliases for the index. @@ -48,6 +48,7 @@ func NewRequest() *Request { r := &Request{ Aliases: make(map[string]types.Alias, 0), } + return r } diff --git a/typedapi/indices/create/response.go b/typedapi/indices/create/response.go index 0f932614d6..3acb7fd05a 100644 --- a/typedapi/indices/create/response.go +++ b/typedapi/indices/create/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package create // Response holds the response body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/create/IndicesCreateResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/create/IndicesCreateResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` Index string `json:"index"` diff --git a/typedapi/indices/createdatastream/create_data_stream.go b/typedapi/indices/createdatastream/create_data_stream.go index addb464b16..1c1dc03437 100644 --- a/typedapi/indices/createdatastream/create_data_stream.go +++ b/typedapi/indices/createdatastream/create_data_stream.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates a data stream +// Creates a data stream. +// You must have a matching index template with data stream enabled. package createdatastream import ( @@ -27,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +77,8 @@ func NewCreateDataStreamFunc(tp elastictransport.Interface) NewCreateDataStream } } -// Creates a data stream +// Creates a data stream. +// You must have a matching index template with data stream enabled. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html func New(tp elastictransport.Interface) *CreateDataStream { @@ -260,7 +262,7 @@ func (r CreateDataStream) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -303,3 +305,47 @@ func (r *CreateDataStream) _name(name string) *CreateDataStream { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *CreateDataStream) ErrorTrace(errortrace bool) *CreateDataStream { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *CreateDataStream) FilterPath(filterpaths ...string) *CreateDataStream { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *CreateDataStream) Human(human bool) *CreateDataStream { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *CreateDataStream) Pretty(pretty bool) *CreateDataStream { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/createdatastream/response.go b/typedapi/indices/createdatastream/response.go index 1f4b598be9..b57646af73 100644 --- a/typedapi/indices/createdatastream/response.go +++ b/typedapi/indices/createdatastream/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package createdatastream // Response holds the response body struct for the package createdatastream // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/create_data_stream/IndicesCreateDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/create_data_stream/IndicesCreateDataStreamResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/datastreamsstats/data_streams_stats.go b/typedapi/indices/datastreamsstats/data_streams_stats.go index 5b00c84b1d..90be1ec8cc 100644 --- a/typedapi/indices/datastreamsstats/data_streams_stats.go +++ b/typedapi/indices/datastreamsstats/data_streams_stats.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Provides statistics on operations happening in a data stream. +// Retrieves statistics for one or more data streams. package datastreamsstats import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -75,7 +75,7 @@ func NewDataStreamsStatsFunc(tp elastictransport.Interface) NewDataStreamsStats } } -// Provides statistics on operations happening in a data stream. +// Retrieves statistics for one or more data streams. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html func New(tp elastictransport.Interface) *DataStreamsStats { @@ -268,7 +268,7 @@ func (r DataStreamsStats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -319,3 +319,47 @@ func (r *DataStreamsStats) ExpandWildcards(expandwildcards ...expandwildcard.Exp return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *DataStreamsStats) ErrorTrace(errortrace bool) *DataStreamsStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DataStreamsStats) FilterPath(filterpaths ...string) *DataStreamsStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DataStreamsStats) Human(human bool) *DataStreamsStats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DataStreamsStats) Pretty(pretty bool) *DataStreamsStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/datastreamsstats/response.go b/typedapi/indices/datastreamsstats/response.go index 059ff69e3a..6f6bfd1c9a 100644 --- a/typedapi/indices/datastreamsstats/response.go +++ b/typedapi/indices/datastreamsstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package datastreamsstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package datastreamsstats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/data_streams_stats/IndicesDataStreamsStatsResponse.ts#L25-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/data_streams_stats/IndicesDataStreamsStatsResponse.ts#L25-L43 type Response struct { // BackingIndices Total number of backing indices for the selected data streams. diff --git a/typedapi/indices/delete/delete.go b/typedapi/indices/delete/delete.go index c56b869922..c7e5590793 100644 --- a/typedapi/indices/delete/delete.go +++ b/typedapi/indices/delete/delete.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes an index. +// Deletes one or more indices. package delete import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -78,7 +77,7 @@ func NewDeleteFunc(tp elastictransport.Interface) NewDelete { } } -// Deletes an index. +// Deletes one or more indices. 
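The recurring ioutil.Discard to io.Discard swap in the IsSuccess helpers is the standard cleanup for the deprecated io/ioutil package; io.Discard has been available since Go 1.16 and behaves identically. A small self-contained illustration of the drain-and-close pattern these helpers use (the URL is a placeholder):

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	res, err := http.Get("http://localhost:9200") // placeholder endpoint
	if err != nil {
		panic(err)
	}
	// Drain the body so the underlying connection can be reused, then
	// close it. io.Discard replaces the deprecated ioutil.Discard with
	// identical behaviour.
	if _, err := io.Copy(io.Discard, res.Body); err != nil {
		panic(err)
	}
	if err := res.Body.Close(); err != nil {
		panic(err)
	}
	fmt.Println("status:", res.Status)
}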
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-delete-index.html func New(tp elastictransport.Interface) *Delete { @@ -260,7 +259,7 @@ func (r Delete) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -355,3 +354,47 @@ func (r *Delete) Timeout(duration string) *Delete { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Delete) ErrorTrace(errortrace bool) *Delete { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Delete) FilterPath(filterpaths ...string) *Delete { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Delete) Human(human bool) *Delete { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Delete) Pretty(pretty bool) *Delete { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/delete/response.go b/typedapi/indices/delete/response.go index c7a14ad267..74b8d59f57 100644 --- a/typedapi/indices/delete/response.go +++ b/typedapi/indices/delete/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package delete @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/delete/IndicesDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/delete/IndicesDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/deletealias/delete_alias.go b/typedapi/indices/deletealias/delete_alias.go index afda554c4a..21a1c8958d 100644 --- a/typedapi/indices/deletealias/delete_alias.go +++ b/typedapi/indices/deletealias/delete_alias.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes an alias. 
+// Removes a data stream or index from an alias. package deletealias import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -81,7 +81,7 @@ func NewDeleteAliasFunc(tp elastictransport.Interface) NewDeleteAlias { } } -// Deletes an alias. +// Removes a data stream or index from an alias. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html func New(tp elastictransport.Interface) *DeleteAlias { @@ -288,7 +288,7 @@ func (r DeleteAlias) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -355,3 +355,47 @@ func (r *DeleteAlias) Timeout(duration string) *DeleteAlias { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteAlias) ErrorTrace(errortrace bool) *DeleteAlias { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteAlias) FilterPath(filterpaths ...string) *DeleteAlias { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteAlias) Human(human bool) *DeleteAlias { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteAlias) Pretty(pretty bool) *DeleteAlias { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/deletealias/response.go b/typedapi/indices/deletealias/response.go index 2a79747487..e30709d30f 100644 --- a/typedapi/indices/deletealias/response.go +++ b/typedapi/indices/deletealias/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletealias // Response holds the response body struct for the package deletealias // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/delete_alias/IndicesDeleteAliasResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/delete_alias/IndicesDeleteAliasResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/indices/deletedatalifecycle/delete_data_lifecycle.go b/typedapi/indices/deletedatalifecycle/delete_data_lifecycle.go index 3bdce6ac7f..3ab5ada595 100644 --- a/typedapi/indices/deletedatalifecycle/delete_data_lifecycle.go +++ b/typedapi/indices/deletedatalifecycle/delete_data_lifecycle.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes the data stream lifecycle of the selected data streams. +// Removes the data lifecycle from a data stream rendering it not managed by the +// data stream lifecycle package deletedatalifecycle import ( @@ -27,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,7 +78,8 @@ func NewDeleteDataLifecycleFunc(tp elastictransport.Interface) NewDeleteDataLife } } -// Deletes the data stream lifecycle of the selected data streams. +// Removes the data lifecycle from a data stream rendering it not managed by the +// data stream lifecycle // // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams-delete-lifecycle.html func New(tp elastictransport.Interface) *DeleteDataLifecycle { @@ -263,7 +265,7 @@ func (r DeleteDataLifecycle) IsSuccess(providedCtx context.Context) (bool, error if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -329,3 +331,47 @@ func (r *DeleteDataLifecycle) Timeout(duration string) *DeleteDataLifecycle { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteDataLifecycle) ErrorTrace(errortrace bool) *DeleteDataLifecycle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteDataLifecycle) FilterPath(filterpaths ...string) *DeleteDataLifecycle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteDataLifecycle) Human(human bool) *DeleteDataLifecycle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *DeleteDataLifecycle) Pretty(pretty bool) *DeleteDataLifecycle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/deletedatalifecycle/response.go b/typedapi/indices/deletedatalifecycle/response.go index f62a89ebd1..2578e97fc0 100644 --- a/typedapi/indices/deletedatalifecycle/response.go +++ b/typedapi/indices/deletedatalifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletedatalifecycle // Response holds the response body struct for the package deletedatalifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/delete_data_lifecycle/IndicesDeleteDataLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/delete_data_lifecycle/IndicesDeleteDataLifecycleResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/deletedatastream/delete_data_stream.go b/typedapi/indices/deletedatastream/delete_data_stream.go index e4c6c5a1c5..51ccc4b2c6 100644 --- a/typedapi/indices/deletedatastream/delete_data_stream.go +++ b/typedapi/indices/deletedatastream/delete_data_stream.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes a data stream. +// Deletes one or more data streams and their backing indices. package deletedatastream import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,7 +77,7 @@ func NewDeleteDataStreamFunc(tp elastictransport.Interface) NewDeleteDataStream } } -// Deletes a data stream. +// Deletes one or more data streams and their backing indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html func New(tp elastictransport.Interface) *DeleteDataStream { @@ -261,7 +261,7 @@ func (r DeleteDataStream) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -311,3 +311,47 @@ func (r *DeleteDataStream) ExpandWildcards(expandwildcards ...expandwildcard.Exp return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteDataStream) ErrorTrace(errortrace bool) *DeleteDataStream { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *DeleteDataStream) FilterPath(filterpaths ...string) *DeleteDataStream { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteDataStream) Human(human bool) *DeleteDataStream { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteDataStream) Pretty(pretty bool) *DeleteDataStream { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/deletedatastream/response.go b/typedapi/indices/deletedatastream/response.go index 910ae55c6f..fad4492924 100644 --- a/typedapi/indices/deletedatastream/response.go +++ b/typedapi/indices/deletedatastream/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletedatastream // Response holds the response body struct for the package deletedatastream // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/delete_data_stream/IndicesDeleteDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/delete_data_stream/IndicesDeleteDataStreamResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/deleteindextemplate/delete_index_template.go b/typedapi/indices/deleteindextemplate/delete_index_template.go index 16842f19d4..5a954d22a4 100644 --- a/typedapi/indices/deleteindextemplate/delete_index_template.go +++ b/typedapi/indices/deleteindextemplate/delete_index_template.go @@ -16,9 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Deletes an index template. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Delete an index template. +// The provided may contain multiple template names separated +// by a comma. If multiple template +// names are specified then there is no wildcard support and the provided names +// should match completely with +// existing templates. package deleteindextemplate import ( @@ -27,9 +32,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +81,12 @@ func NewDeleteIndexTemplateFunc(tp elastictransport.Interface) NewDeleteIndexTem } } -// Deletes an index template. +// Delete an index template. 
+// The provided may contain multiple template names separated +// by a comma. If multiple template +// names are specified then there is no wildcard support and the provided names +// should match completely with +// existing templates. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-delete-template.html func New(tp elastictransport.Interface) *DeleteIndexTemplate { @@ -260,7 +270,7 @@ func (r DeleteIndexTemplate) IsSuccess(providedCtx context.Context) (bool, error if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -315,3 +325,47 @@ func (r *DeleteIndexTemplate) Timeout(duration string) *DeleteIndexTemplate { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteIndexTemplate) ErrorTrace(errortrace bool) *DeleteIndexTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteIndexTemplate) FilterPath(filterpaths ...string) *DeleteIndexTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteIndexTemplate) Human(human bool) *DeleteIndexTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteIndexTemplate) Pretty(pretty bool) *DeleteIndexTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/deleteindextemplate/response.go b/typedapi/indices/deleteindextemplate/response.go index 739da48abb..34fe7b1d54 100644 --- a/typedapi/indices/deleteindextemplate/response.go +++ b/typedapi/indices/deleteindextemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleteindextemplate // Response holds the response body struct for the package deleteindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/delete_index_template/IndicesDeleteIndexTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/delete_index_template/IndicesDeleteIndexTemplateResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/indices/deletetemplate/delete_template.go b/typedapi/indices/deletetemplate/delete_template.go index cb8bc2ccac..b3cb93d505 100644 --- a/typedapi/indices/deletetemplate/delete_template.go +++ b/typedapi/indices/deletetemplate/delete_template.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes an index template. +// Deletes a legacy index template. package deletetemplate import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +76,7 @@ func NewDeleteTemplateFunc(tp elastictransport.Interface) NewDeleteTemplate { } } -// Deletes an index template. +// Deletes a legacy index template. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-delete-template-v1.html func New(tp elastictransport.Interface) *DeleteTemplate { @@ -260,7 +260,7 @@ func (r DeleteTemplate) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -317,3 +317,47 @@ func (r *DeleteTemplate) Timeout(duration string) *DeleteTemplate { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteTemplate) ErrorTrace(errortrace bool) *DeleteTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteTemplate) FilterPath(filterpaths ...string) *DeleteTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteTemplate) Human(human bool) *DeleteTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteTemplate) Pretty(pretty bool) *DeleteTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/deletetemplate/response.go b/typedapi/indices/deletetemplate/response.go index 63a196ec5c..09c62f1f65 100644 --- a/typedapi/indices/deletetemplate/response.go +++ b/typedapi/indices/deletetemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
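As the updated description above notes, the composable index template delete accepts several comma-separated template names with no wildcard support, while the endpoint that follows targets legacy (v1) templates. A hedged sketch, assuming the typed client (built as in the earlier example) exposes these as Indices.DeleteIndexTemplate and Indices.DeleteTemplate; the template names are placeholders.

package esexamples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

// deleteTemplates removes two composable index templates in one call by
// passing their names comma-separated (exact names, no wildcards), then
// removes a legacy (v1) template. Call shapes are assumptions; the
// comma-separated behaviour is described in the generated comment above.
func deleteTemplates(es *elasticsearch.TypedClient) error {
	if _, err := es.Indices.DeleteIndexTemplate("logs-template,metrics-template").
		Do(context.Background()); err != nil {
		return err
	}
	_, err := es.Indices.DeleteTemplate("legacy-logs-template").
		Do(context.Background())
	return err
}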
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletetemplate // Response holds the response body struct for the package deletetemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/delete_template/IndicesDeleteTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/delete_template/IndicesDeleteTemplateResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/diskusage/disk_usage.go b/typedapi/indices/diskusage/disk_usage.go index 650d9e004b..4916db0284 100644 --- a/typedapi/indices/diskusage/disk_usage.go +++ b/typedapi/indices/diskusage/disk_usage.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Analyzes the disk usage of each field of an index or data stream +// Analyzes the disk usage of each field of an index or data stream. package diskusage import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -78,7 +77,7 @@ func NewDiskUsageFunc(tp elastictransport.Interface) NewDiskUsage { } } -// Analyzes the disk usage of each field of an index or data stream +// Analyzes the disk usage of each field of an index or data stream. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-disk-usage.html func New(tp elastictransport.Interface) *DiskUsage { @@ -262,7 +261,7 @@ func (r DiskUsage) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -354,3 +353,47 @@ func (r *DiskUsage) RunExpensiveTasks(runexpensivetasks bool) *DiskUsage { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DiskUsage) ErrorTrace(errortrace bool) *DiskUsage { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DiskUsage) FilterPath(filterpaths ...string) *DiskUsage { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DiskUsage) Human(human bool) *DiskUsage { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". 
Only use +// this option for debugging only. +// API name: pretty +func (r *DiskUsage) Pretty(pretty bool) *DiskUsage { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/diskusage/response.go b/typedapi/indices/diskusage/response.go index 4aa10abe06..f65c63e1f9 100644 --- a/typedapi/indices/diskusage/response.go +++ b/typedapi/indices/diskusage/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package diskusage @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package diskusage // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/disk_usage/IndicesDiskUsageResponse.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/disk_usage/IndicesDiskUsageResponse.ts#L22-L25 type Response = json.RawMessage diff --git a/typedapi/indices/downsample/downsample.go b/typedapi/indices/downsample/downsample.go index a179716faf..aa066479ce 100644 --- a/typedapi/indices/downsample/downsample.go +++ b/typedapi/indices/downsample/downsample.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Downsample an index +// Aggregates a time series (TSDS) index and stores pre-computed statistical +// summaries (`min`, `max`, `sum`, `value_count` and `avg`) for each metric +// field grouped by a configured time interval. package downsample import ( @@ -30,6 +32,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -85,7 +88,9 @@ func NewDownsampleFunc(tp elastictransport.Interface) NewDownsample { } } -// Downsample an index +// Aggregates a time series (TSDS) index and stores pre-computed statistical +// summaries (`min`, `max`, `sum`, `value_count` and `avg`) for each metric +// field grouped by a configured time interval. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-downsample-data-stream.html func New(tp elastictransport.Interface) *Downsample { @@ -95,6 +100,8 @@ func New(tp elastictransport.Interface) *Downsample { headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -330,6 +337,50 @@ func (r *Downsample) _targetindex(targetindex string) *Downsample { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Downsample) ErrorTrace(errortrace bool) *Downsample { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Downsample) FilterPath(filterpaths ...string) *Downsample { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Downsample) Human(human bool) *Downsample { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Downsample) Pretty(pretty bool) *Downsample { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // FixedInterval The interval at which to aggregate the original time series index. // API name: fixed_interval func (r *Downsample) FixedInterval(durationlarge string) *Downsample { diff --git a/typedapi/indices/downsample/request.go b/typedapi/indices/downsample/request.go index be181bc428..276a6a4617 100644 --- a/typedapi/indices/downsample/request.go +++ b/typedapi/indices/downsample/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package downsample @@ -26,5 +26,12 @@ import ( // Request holds the request body struct for the package downsample // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/downsample/Request.ts#L24-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/downsample/Request.ts#L24-L44 type Request = types.DownsampleConfig + +// NewRequest returns a Request +func NewRequest() *Request { + r := types.NewDownsampleConfig() + + return r +} diff --git a/typedapi/indices/downsample/response.go b/typedapi/indices/downsample/response.go index fc74803dda..db9571102a 100644 --- a/typedapi/indices/downsample/response.go +++ b/typedapi/indices/downsample/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
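The downsample endpoint now pre-allocates its request body (NewRequest wraps types.NewDownsampleConfig), so the fixed interval can be set directly on the builder. A hedged sketch, assuming the typed client exposes the call as Indices.Downsample(source, target) and a client built as in the earlier example; only FixedInterval and Do are taken from the generated code.

package esexamples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

// downsampleHourly rolls a TSDS backing index up into hourly summaries.
// The (source, target) argument order is an assumption; FixedInterval
// comes from the generated setter shown in this diff.
func downsampleHourly(es *elasticsearch.TypedClient) error {
	_, err := es.Indices.Downsample("my-tsds-index-000001", "my-tsds-index-000001-downsampled-1h").
		FixedInterval("1h").
		Do(context.Background())
	return err
}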
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package downsample @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package downsample // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/downsample/Response.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/downsample/Response.ts#L22-L25 type Response = json.RawMessage diff --git a/typedapi/indices/exists/exists.go b/typedapi/indices/exists/exists.go index 81b6178835..1f2c561594 100644 --- a/typedapi/indices/exists/exists.go +++ b/typedapi/indices/exists/exists.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about whether a particular index exists. +// Checks if a data stream, index, or alias exists. package exists import ( @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,7 +75,7 @@ func NewExistsFunc(tp elastictransport.Interface) NewExists { } } -// Returns information about whether a particular index exists. +// Checks if a data stream, index, or alias exists. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-exists.html func New(tp elastictransport.Interface) *Exists { @@ -209,7 +208,7 @@ func (r Exists) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -305,3 +304,47 @@ func (r *Exists) Local(local bool) *Exists { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Exists) ErrorTrace(errortrace bool) *Exists { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Exists) FilterPath(filterpaths ...string) *Exists { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Exists) Human(human bool) *Exists { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *Exists) Pretty(pretty bool) *Exists { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/existsalias/exists_alias.go b/typedapi/indices/existsalias/exists_alias.go index 704bfe26fb..b401ead318 100644 --- a/typedapi/indices/existsalias/exists_alias.go +++ b/typedapi/indices/existsalias/exists_alias.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about whether a particular alias exists. +// Checks if an alias exists. package existsalias import ( @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -79,7 +78,7 @@ func NewExistsAliasFunc(tp elastictransport.Interface) NewExistsAlias { } } -// Returns information about whether a particular alias exists. +// Checks if an alias exists. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html func New(tp elastictransport.Interface) *ExistsAlias { @@ -231,7 +230,7 @@ func (r ExistsAlias) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -322,3 +321,47 @@ func (r *ExistsAlias) Local(local bool) *ExistsAlias { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ExistsAlias) ErrorTrace(errortrace bool) *ExistsAlias { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ExistsAlias) FilterPath(filterpaths ...string) *ExistsAlias { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ExistsAlias) Human(human bool) *ExistsAlias { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ExistsAlias) Pretty(pretty bool) *ExistsAlias { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/existsindextemplate/exists_index_template.go b/typedapi/indices/existsindextemplate/exists_index_template.go index 043f98b7c7..25cafc6bf3 100644 --- a/typedapi/indices/existsindextemplate/exists_index_template.go +++ b/typedapi/indices/existsindextemplate/exists_index_template.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
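The exists-style endpoints are HEAD requests, so the generated IsSuccess helper (whose body draining now also goes through io.Discard) is the natural way to consume them. A hedged sketch in the same vein as the previous helper; the Indices.Exists(name) call shape is an assumption based on the typed client layout.

package esexamples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

// indexExists reports whether a data stream, index, or alias exists.
// IsSuccess comes from the generated code above.
func indexExists(es *elasticsearch.TypedClient, name string) (bool, error) {
	return es.Indices.Exists(name).IsSuccess(context.Background())
}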
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns information about whether a particular index template exists. package existsindextemplate @@ -26,9 +26,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -209,7 +209,7 @@ func (r ExistsIndexTemplate) IsSuccess(providedCtx context.Context) (bool, error if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -255,3 +255,47 @@ func (r *ExistsIndexTemplate) MasterTimeout(duration string) *ExistsIndexTemplat return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ExistsIndexTemplate) ErrorTrace(errortrace bool) *ExistsIndexTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ExistsIndexTemplate) FilterPath(filterpaths ...string) *ExistsIndexTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ExistsIndexTemplate) Human(human bool) *ExistsIndexTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ExistsIndexTemplate) Pretty(pretty bool) *ExistsIndexTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/existstemplate/exists_template.go b/typedapi/indices/existstemplate/exists_template.go index 358b5c8ebf..02bf0c1d18 100644 --- a/typedapi/indices/existstemplate/exists_template.go +++ b/typedapi/indices/existstemplate/exists_template.go @@ -16,8 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed +// Check existence of index templates. // Returns information about whether a particular index template exists. package existstemplate @@ -26,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -75,6 +75,7 @@ func NewExistsTemplateFunc(tp elastictransport.Interface) NewExistsTemplate { } } +// Check existence of index templates. // Returns information about whether a particular index template exists. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-template-exists-v1.html @@ -210,7 +211,7 @@ func (r ExistsTemplate) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -271,3 +272,47 @@ func (r *ExistsTemplate) MasterTimeout(duration string) *ExistsTemplate { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ExistsTemplate) ErrorTrace(errortrace bool) *ExistsTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ExistsTemplate) FilterPath(filterpaths ...string) *ExistsTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ExistsTemplate) Human(human bool) *ExistsTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ExistsTemplate) Pretty(pretty bool) *ExistsTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/explaindatalifecycle/explain_data_lifecycle.go b/typedapi/indices/explaindatalifecycle/explain_data_lifecycle.go index 1487dfd5d7..3b42044be3 100644 --- a/typedapi/indices/explaindatalifecycle/explain_data_lifecycle.go +++ b/typedapi/indices/explaindatalifecycle/explain_data_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves information about the index's current data stream lifecycle, such // as any potential encountered error, time since creation etc. @@ -28,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -265,7 +264,7 @@ func (r ExplainDataLifecycle) IsSuccess(providedCtx context.Context) (bool, erro if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -318,3 +317,47 @@ func (r *ExplainDataLifecycle) MasterTimeout(duration string) *ExplainDataLifecy return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *ExplainDataLifecycle) ErrorTrace(errortrace bool) *ExplainDataLifecycle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ExplainDataLifecycle) FilterPath(filterpaths ...string) *ExplainDataLifecycle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ExplainDataLifecycle) Human(human bool) *ExplainDataLifecycle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ExplainDataLifecycle) Pretty(pretty bool) *ExplainDataLifecycle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/explaindatalifecycle/response.go b/typedapi/indices/explaindatalifecycle/response.go index 0d492f81f9..8f6acbba25 100644 --- a/typedapi/indices/explaindatalifecycle/response.go +++ b/typedapi/indices/explaindatalifecycle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package explaindatalifecycle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package explaindatalifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/explain_data_lifecycle/IndicesExplainDataLifecycleResponse.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/explain_data_lifecycle/IndicesExplainDataLifecycleResponse.ts#L25-L29 type Response struct { Indices map[string]types.DataStreamLifecycleExplain `json:"indices"` } diff --git a/typedapi/indices/fieldusagestats/field_usage_stats.go b/typedapi/indices/fieldusagestats/field_usage_stats.go index 4bc2f7a321..a368324e61 100644 --- a/typedapi/indices/fieldusagestats/field_usage_stats.go +++ b/typedapi/indices/fieldusagestats/field_usage_stats.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns the field usage stats for each field of an index +// Returns field usage information for each shard and field of an index. 
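Editorial note: the io/ioutil import removals throughout these hunks rely on the Go 1.16+ standard library, where io.Discard and io.ReadAll are drop-in replacements for the deprecated ioutil.Discard and ioutil.ReadAll. A minimal sketch of the body-drain pattern used by the generated IsSuccess helpers; the function name here is hypothetical:

package example

import (
	"io"
	"net/http"
)

// drainAndClose mirrors the pattern in the generated IsSuccess helpers:
// the body is drained so the underlying connection can be reused, then closed.
func drainAndClose(res *http.Response) error {
	if _, err := io.Copy(io.Discard, res.Body); err != nil { // formerly ioutil.Discard
		return err
	}
	return res.Body.Close()
}
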
package fieldusagestats import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -78,7 +77,7 @@ func NewFieldUsageStatsFunc(tp elastictransport.Interface) NewFieldUsageStats { } } -// Returns the field usage stats for each field of an index +// Returns field usage information for each shard and field of an index. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/field-usage-stats.html func New(tp elastictransport.Interface) *FieldUsageStats { @@ -262,7 +261,7 @@ func (r FieldUsageStats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -374,3 +373,47 @@ func (r *FieldUsageStats) WaitForActiveShards(waitforactiveshards string) *Field return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *FieldUsageStats) ErrorTrace(errortrace bool) *FieldUsageStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *FieldUsageStats) FilterPath(filterpaths ...string) *FieldUsageStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *FieldUsageStats) Human(human bool) *FieldUsageStats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *FieldUsageStats) Pretty(pretty bool) *FieldUsageStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/fieldusagestats/response.go b/typedapi/indices/fieldusagestats/response.go index 0770ddbc11..6e9ffe7b59 100644 --- a/typedapi/indices/fieldusagestats/response.go +++ b/typedapi/indices/fieldusagestats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package fieldusagestats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package fieldusagestats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L28-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L28-L30 type Response struct { FieldsUsageBody map[string]types.UsageStatsIndex `json:"-"` Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/indices/flush/flush.go b/typedapi/indices/flush/flush.go index 55f3df7b02..d952c66bc7 100644 --- a/typedapi/indices/flush/flush.go +++ b/typedapi/indices/flush/flush.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Performs the flush operation on one or more indices. +// Flushes one or more data streams or indices. package flush import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,7 +75,7 @@ func NewFlushFunc(tp elastictransport.Interface) NewFlush { } } -// Performs the flush operation on one or more indices. +// Flushes one or more data streams or indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-flush.html func New(tp elastictransport.Interface) *Flush { @@ -265,7 +264,7 @@ func (r Flush) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -359,3 +358,47 @@ func (r *Flush) WaitIfOngoing(waitifongoing bool) *Flush { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Flush) ErrorTrace(errortrace bool) *Flush { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Flush) FilterPath(filterpaths ...string) *Flush { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Flush) Human(human bool) *Flush { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *Flush) Pretty(pretty bool) *Flush { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/flush/response.go b/typedapi/indices/flush/response.go index afbe1c3c35..49a0cc04fb 100644 --- a/typedapi/indices/flush/response.go +++ b/typedapi/indices/flush/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package flush @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package flush // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/flush/IndicesFlushResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/flush/IndicesFlushResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` } diff --git a/typedapi/indices/forcemerge/forcemerge.go b/typedapi/indices/forcemerge/forcemerge.go index 50c446b08f..ae3bc230df 100644 --- a/typedapi/indices/forcemerge/forcemerge.go +++ b/typedapi/indices/forcemerge/forcemerge.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Performs the force merge operation on one or more indices. package forcemerge @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -265,7 +264,7 @@ func (r Forcemerge) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -366,3 +365,47 @@ func (r *Forcemerge) WaitForCompletion(waitforcompletion bool) *Forcemerge { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Forcemerge) ErrorTrace(errortrace bool) *Forcemerge { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Forcemerge) FilterPath(filterpaths ...string) *Forcemerge { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Forcemerge) Human(human bool) *Forcemerge { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *Forcemerge) Pretty(pretty bool) *Forcemerge { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/forcemerge/response.go b/typedapi/indices/forcemerge/response.go index 032cb034cd..9856f41554 100644 --- a/typedapi/indices/forcemerge/response.go +++ b/typedapi/indices/forcemerge/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package forcemerge @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package forcemerge // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/forcemerge/IndicesForceMergeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/forcemerge/IndicesForceMergeResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` // Task task contains a task id returned when wait_for_completion=false, diff --git a/typedapi/indices/get/get.go b/typedapi/indices/get/get.go index de357b4a60..9251b92593 100644 --- a/typedapi/indices/get/get.go +++ b/typedapi/indices/get/get.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about one or more indices. +// Returns information about one or more indices. For data streams, the API +// returns information about the +// stream’s backing indices. package get import ( @@ -27,7 +29,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -79,7 +80,9 @@ func NewGetFunc(tp elastictransport.Interface) NewGet { } } -// Returns information about one or more indices. +// Returns information about one or more indices. For data streams, the API +// returns information about the +// stream’s backing indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-index.html func New(tp elastictransport.Interface) *Get { @@ -261,7 +264,7 @@ func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -382,3 +385,47 @@ func (r *Get) Features(features ...feature.Feature) *Get { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Get) ErrorTrace(errortrace bool) *Get { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Get) FilterPath(filterpaths ...string) *Get { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Get) Human(human bool) *Get { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Get) Pretty(pretty bool) *Get { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/get/response.go b/typedapi/indices/get/response.go index 5ef5f18eae..330d554677 100644 --- a/typedapi/indices/get/response.go +++ b/typedapi/indices/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get/IndicesGetResponse.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get/IndicesGetResponse.ts#L24-L27 type Response map[string]types.IndexState diff --git a/typedapi/indices/getalias/get_alias.go b/typedapi/indices/getalias/get_alias.go index b6f868512f..4d67d9d859 100644 --- a/typedapi/indices/getalias/get_alias.go +++ b/typedapi/indices/getalias/get_alias.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns an alias. +// Retrieves information for one or more aliases. package getalias import ( @@ -28,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -80,7 +79,7 @@ func NewGetAliasFunc(tp elastictransport.Interface) NewGetAlias { } } -// Returns an alias. +// Retrieves information for one or more aliases. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html func New(tp elastictransport.Interface) *GetAlias { @@ -261,7 +260,7 @@ func (r GetAlias) Do(providedCtx context.Context) (Response, error) { } if res.StatusCode == 404 { - data, err := ioutil.ReadAll(res.Body) + data, err := io.ReadAll(res.Body) if err != nil { if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, err) @@ -333,7 +332,7 @@ func (r GetAlias) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -426,3 +425,47 @@ func (r *GetAlias) Local(local bool) *GetAlias { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetAlias) ErrorTrace(errortrace bool) *GetAlias { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetAlias) FilterPath(filterpaths ...string) *GetAlias { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetAlias) Human(human bool) *GetAlias { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetAlias) Pretty(pretty bool) *GetAlias { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/getalias/response.go b/typedapi/indices/getalias/response.go index 73d61ef216..9d98af580c 100644 --- a/typedapi/indices/getalias/response.go +++ b/typedapi/indices/getalias/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
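Editorial note: GetAlias now describes itself as retrieving information for one or more aliases, and its 404 handling reads the body with io.ReadAll. A minimal calling sketch, assuming a typed client; the index name and filter_path value are illustrative, and Index is assumed to be the usual optional-path-parameter setter on this builder:

package example

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

// listAliases is a sketch only: "my-index" and the filter_path value are placeholders.
func listAliases(es *elasticsearch.TypedClient) error {
	res, err := es.Indices.GetAlias().
		Index("my-index").       // optional path parameter
		FilterPath("*.aliases"). // newly generated filter_path helper
		Do(context.Background())
	if err != nil {
		return err
	}
	for index := range res { // Response is a map keyed by index name
		log.Println(index)
	}
	return nil
}
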
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getalias @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getalias // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_alias/IndicesGetAliasResponse.ts#L26-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_alias/IndicesGetAliasResponse.ts#L26-L35 type Response map[string]types.IndexAliases diff --git a/typedapi/indices/getdatalifecycle/get_data_lifecycle.go b/typedapi/indices/getdatalifecycle/get_data_lifecycle.go index 5ffbee239f..a188237e41 100644 --- a/typedapi/indices/getdatalifecycle/get_data_lifecycle.go +++ b/typedapi/indices/getdatalifecycle/get_data_lifecycle.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns the data stream lifecycle of the selected data streams. +// Retrieves the data stream lifecycle configuration of one or more data +// streams. package getdatalifecycle import ( @@ -27,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -78,7 +78,8 @@ func NewGetDataLifecycleFunc(tp elastictransport.Interface) NewGetDataLifecycle } } -// Returns the data stream lifecycle of the selected data streams. +// Retrieves the data stream lifecycle configuration of one or more data +// streams. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams-get-lifecycle.html func New(tp elastictransport.Interface) *GetDataLifecycle { @@ -264,7 +265,7 @@ func (r GetDataLifecycle) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -324,3 +325,47 @@ func (r *GetDataLifecycle) IncludeDefaults(includedefaults bool) *GetDataLifecyc return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetDataLifecycle) ErrorTrace(errortrace bool) *GetDataLifecycle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetDataLifecycle) FilterPath(filterpaths ...string) *GetDataLifecycle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *GetDataLifecycle) Human(human bool) *GetDataLifecycle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetDataLifecycle) Pretty(pretty bool) *GetDataLifecycle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/getdatalifecycle/response.go b/typedapi/indices/getdatalifecycle/response.go index 8fb5eea6dc..714acdc88c 100644 --- a/typedapi/indices/getdatalifecycle/response.go +++ b/typedapi/indices/getdatalifecycle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getdatalifecycle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdatalifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_data_lifecycle/IndicesGetDataLifecycleResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_data_lifecycle/IndicesGetDataLifecycleResponse.ts#L23-L25 type Response struct { DataStreams []types.DataStreamWithLifecycle `json:"data_streams"` } diff --git a/typedapi/indices/getdatastream/get_data_stream.go b/typedapi/indices/getdatastream/get_data_stream.go index 5bd44be1e8..c8b08a66ca 100644 --- a/typedapi/indices/getdatastream/get_data_stream.go +++ b/typedapi/indices/getdatastream/get_data_stream.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns data streams. +// Retrieves information about one or more data streams. package getdatastream import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,7 +75,7 @@ func NewGetDataStreamFunc(tp elastictransport.Interface) NewGetDataStream { } } -// Returns data streams. +// Retrieves information about one or more data streams. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html func New(tp elastictransport.Interface) *GetDataStream { @@ -265,7 +264,7 @@ func (r GetDataStream) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -324,3 +323,47 @@ func (r *GetDataStream) IncludeDefaults(includedefaults bool) *GetDataStream { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetDataStream) ErrorTrace(errortrace bool) *GetDataStream { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *GetDataStream) FilterPath(filterpaths ...string) *GetDataStream { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetDataStream) Human(human bool) *GetDataStream { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetDataStream) Pretty(pretty bool) *GetDataStream { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/getdatastream/response.go b/typedapi/indices/getdatastream/response.go index 49f0c9afce..515fa45324 100644 --- a/typedapi/indices/getdatastream/response.go +++ b/typedapi/indices/getdatastream/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getdatastream @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdatastream // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_data_stream/IndicesGetDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_data_stream/IndicesGetDataStreamResponse.ts#L22-L24 type Response struct { DataStreams []types.DataStream `json:"data_streams"` } diff --git a/typedapi/indices/getfieldmapping/get_field_mapping.go b/typedapi/indices/getfieldmapping/get_field_mapping.go index 2f0c334db4..4982491e20 100644 --- a/typedapi/indices/getfieldmapping/get_field_mapping.go +++ b/typedapi/indices/getfieldmapping/get_field_mapping.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns mapping for one or more fields. +// Retrieves mapping definitions for one or more fields. +// For data streams, the API retrieves field mappings for the stream’s backing +// indices. package getfieldmapping import ( @@ -27,7 +29,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -81,7 +82,9 @@ func NewGetFieldMappingFunc(tp elastictransport.Interface) NewGetFieldMapping { } } -// Returns mapping for one or more fields. +// Retrieves mapping definitions for one or more fields. +// For data streams, the API retrieves field mappings for the stream’s backing +// indices. 
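Editorial note: GetDataStream is now documented as retrieving information about one or more data streams, and it gains the same query-parameter helpers alongside the existing IncludeDefaults option shown above. A minimal sketch, assuming a typed client; the filter_path value is illustrative and the Name field access assumes the usual generated types.DataStream shape:

package example

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

// listDataStreams is a sketch only: flag and filter_path values are illustrative.
func listDataStreams(es *elasticsearch.TypedClient) error {
	res, err := es.Indices.GetDataStream().
		IncludeDefaults(true).           // shown in the hunk above
		FilterPath("data_streams.name"). // newly generated filter_path helper
		Do(context.Background())
	if err != nil {
		return err
	}
	for _, ds := range res.DataStreams { // Response{DataStreams []types.DataStream}
		log.Println(ds.Name)
	}
	return nil
}
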
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-field-mapping.html func New(tp elastictransport.Interface) *GetFieldMapping { @@ -286,7 +289,7 @@ func (r GetFieldMapping) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -387,3 +390,47 @@ func (r *GetFieldMapping) Local(local bool) *GetFieldMapping { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetFieldMapping) ErrorTrace(errortrace bool) *GetFieldMapping { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetFieldMapping) FilterPath(filterpaths ...string) *GetFieldMapping { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetFieldMapping) Human(human bool) *GetFieldMapping { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetFieldMapping) Pretty(pretty bool) *GetFieldMapping { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/getfieldmapping/response.go b/typedapi/indices/getfieldmapping/response.go index 6fe79da8a9..e1c9c8f10b 100644 --- a/typedapi/indices/getfieldmapping/response.go +++ b/typedapi/indices/getfieldmapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getfieldmapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getfieldmapping // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_field_mapping/IndicesGetFieldMappingResponse.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_field_mapping/IndicesGetFieldMappingResponse.ts#L24-L27 type Response map[string]types.TypeFieldMappings diff --git a/typedapi/indices/getindextemplate/get_index_template.go b/typedapi/indices/getindextemplate/get_index_template.go index 2942572d10..9d4fb07a34 100644 --- a/typedapi/indices/getindextemplate/get_index_template.go +++ b/typedapi/indices/getindextemplate/get_index_template.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns an index template. +// Get index templates. +// Returns information about one or more index templates. package getindextemplate import ( @@ -27,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -75,7 +75,8 @@ func NewGetIndexTemplateFunc(tp elastictransport.Interface) NewGetIndexTemplate } } -// Returns an index template. +// Get index templates. +// Returns information about one or more index templates. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template.html func New(tp elastictransport.Interface) *GetIndexTemplate { @@ -264,7 +265,7 @@ func (r GetIndexTemplate) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -335,3 +336,47 @@ func (r *GetIndexTemplate) IncludeDefaults(includedefaults bool) *GetIndexTempla return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetIndexTemplate) ErrorTrace(errortrace bool) *GetIndexTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetIndexTemplate) FilterPath(filterpaths ...string) *GetIndexTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetIndexTemplate) Human(human bool) *GetIndexTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetIndexTemplate) Pretty(pretty bool) *GetIndexTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/getindextemplate/response.go b/typedapi/indices/getindextemplate/response.go index 8557ffad6a..6b54e1643e 100644 --- a/typedapi/indices/getindextemplate/response.go +++ b/typedapi/indices/getindextemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getindextemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_index_template/IndicesGetIndexTemplateResponse.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_index_template/IndicesGetIndexTemplateResponse.ts#L23-L27 type Response struct { IndexTemplates []types.IndexTemplateItem `json:"index_templates"` } diff --git a/typedapi/indices/getmapping/get_mapping.go b/typedapi/indices/getmapping/get_mapping.go index 7a1a8ad0b1..7345531fb1 100644 --- a/typedapi/indices/getmapping/get_mapping.go +++ b/typedapi/indices/getmapping/get_mapping.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns mappings for one or more indices. +// Retrieves mapping definitions for one or more indices. +// For data streams, the API retrieves mappings for the stream’s backing +// indices. package getmapping import ( @@ -27,7 +29,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,7 +77,9 @@ func NewGetMappingFunc(tp elastictransport.Interface) NewGetMapping { } } -// Returns mappings for one or more indices. +// Retrieves mapping definitions for one or more indices. +// For data streams, the API retrieves mappings for the stream’s backing +// indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-mapping.html func New(tp elastictransport.Interface) *GetMapping { @@ -265,7 +268,7 @@ func (r GetMapping) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -358,3 +361,47 @@ func (r *GetMapping) MasterTimeout(duration string) *GetMapping { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetMapping) ErrorTrace(errortrace bool) *GetMapping { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetMapping) FilterPath(filterpaths ...string) *GetMapping { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *GetMapping) Human(human bool) *GetMapping { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetMapping) Pretty(pretty bool) *GetMapping { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/getmapping/response.go b/typedapi/indices/getmapping/response.go index c0e0519584..416e353b26 100644 --- a/typedapi/indices/getmapping/response.go +++ b/typedapi/indices/getmapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getmapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmapping // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_mapping/IndicesGetMappingResponse.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_mapping/IndicesGetMappingResponse.ts#L24-L27 type Response map[string]types.IndexMappingRecord diff --git a/typedapi/indices/getsettings/get_settings.go b/typedapi/indices/getsettings/get_settings.go index 35eed79370..2a0c49569b 100644 --- a/typedapi/indices/getsettings/get_settings.go +++ b/typedapi/indices/getsettings/get_settings.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns settings for one or more indices. +// Returns setting information for one or more indices. For data streams, +// returns setting information for the stream’s backing indices. package getsettings import ( @@ -27,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -79,7 +79,8 @@ func NewGetSettingsFunc(tp elastictransport.Interface) NewGetSettings { } } -// Returns settings for one or more indices. +// Returns setting information for one or more indices. For data streams, +// returns setting information for the stream’s backing indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-settings.html func New(tp elastictransport.Interface) *GetSettings { @@ -296,7 +297,7 @@ func (r GetSettings) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -414,3 +415,47 @@ func (r *GetSettings) MasterTimeout(duration string) *GetSettings { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetSettings) ErrorTrace(errortrace bool) *GetSettings { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *GetSettings) FilterPath(filterpaths ...string) *GetSettings { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetSettings) Human(human bool) *GetSettings { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetSettings) Pretty(pretty bool) *GetSettings { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/getsettings/response.go b/typedapi/indices/getsettings/response.go index fdcc372d7d..f6c85921cc 100644 --- a/typedapi/indices/getsettings/response.go +++ b/typedapi/indices/getsettings/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getsettings @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getsettings // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_settings/IndicesGetSettingsResponse.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_settings/IndicesGetSettingsResponse.ts#L24-L27 type Response map[string]types.IndexState diff --git a/typedapi/indices/gettemplate/get_template.go b/typedapi/indices/gettemplate/get_template.go index 3c70050036..80ae873eb9 100644 --- a/typedapi/indices/gettemplate/get_template.go +++ b/typedapi/indices/gettemplate/get_template.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns an index template. +// Get index templates. +// Retrieves information about one or more index templates. package gettemplate import ( @@ -27,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -75,7 +75,8 @@ func NewGetTemplateFunc(tp elastictransport.Interface) NewGetTemplate { } } -// Returns an index template. +// Get index templates. +// Retrieves information about one or more index templates. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-get-template-v1.html func New(tp elastictransport.Interface) *GetTemplate { @@ -264,7 +265,7 @@ func (r GetTemplate) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -329,3 +330,47 @@ func (r *GetTemplate) MasterTimeout(duration string) *GetTemplate { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetTemplate) ErrorTrace(errortrace bool) *GetTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetTemplate) FilterPath(filterpaths ...string) *GetTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetTemplate) Human(human bool) *GetTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetTemplate) Pretty(pretty bool) *GetTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/gettemplate/response.go b/typedapi/indices/gettemplate/response.go index 2bd3d05659..9f82fb4d5d 100644 --- a/typedapi/indices/gettemplate/response.go +++ b/typedapi/indices/gettemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package gettemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_template/IndicesGetTemplateResponse.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_template/IndicesGetTemplateResponse.ts#L23-L26 type Response map[string]types.TemplateMapping diff --git a/typedapi/indices/migratetodatastream/migrate_to_data_stream.go b/typedapi/indices/migratetodatastream/migrate_to_data_stream.go index eaf6062606..66ac58d21b 100644 --- a/typedapi/indices/migratetodatastream/migrate_to_data_stream.go +++ b/typedapi/indices/migratetodatastream/migrate_to_data_stream.go @@ -16,9 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Migrates an alias to a data stream +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Converts an index alias to a data stream. +// You must have a matching index template that is data stream enabled. +// The alias must meet the following criteria: +// The alias must have a write index; +// All indices for the alias must have a `@timestamp` field mapping of a `date` +// or `date_nanos` field type; +// The alias must not have any filters; +// The alias must not use custom routing. +// If successful, the request removes the alias and creates a data stream with +// the same name. +// The indices for the alias become hidden backing indices for the stream. +// The write index for the alias becomes the write index for the stream. package migratetodatastream import ( @@ -27,9 +38,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +87,18 @@ func NewMigrateToDataStreamFunc(tp elastictransport.Interface) NewMigrateToDataS } } -// Migrates an alias to a data stream +// Converts an index alias to a data stream. +// You must have a matching index template that is data stream enabled. +// The alias must meet the following criteria: +// The alias must have a write index; +// All indices for the alias must have a `@timestamp` field mapping of a `date` +// or `date_nanos` field type; +// The alias must not have any filters; +// The alias must not use custom routing. +// If successful, the request removes the alias and creates a data stream with +// the same name. +// The indices for the alias become hidden backing indices for the stream. +// The write index for the alias becomes the write index for the stream. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html func New(tp elastictransport.Interface) *MigrateToDataStream { @@ -262,7 +284,7 @@ func (r MigrateToDataStream) IsSuccess(providedCtx context.Context) (bool, error if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +320,47 @@ func (r *MigrateToDataStream) _name(name string) *MigrateToDataStream { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *MigrateToDataStream) ErrorTrace(errortrace bool) *MigrateToDataStream { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *MigrateToDataStream) FilterPath(filterpaths ...string) *MigrateToDataStream { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
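Editorial note: the MigrateToDataStream doc comment above now spells out the alias criteria (write index, a `@timestamp` mapping of `date` or `date_nanos`, no filters, no custom routing). A minimal calling sketch, assuming a typed client; "my-alias" is a placeholder that must already satisfy those criteria:

package example

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

// migrateAlias is a sketch only: the alias name is a placeholder.
func migrateAlias(es *elasticsearch.TypedClient) error {
	res, err := es.Indices.MigrateToDataStream("my-alias").
		Pretty(true). // newly generated pretty helper
		Do(context.Background())
	if err != nil {
		return err
	}
	log.Printf("acknowledged: %v", res.Acknowledged) // always true on success, per the response doc
	return nil
}
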
+// API name: human +func (r *MigrateToDataStream) Human(human bool) *MigrateToDataStream { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *MigrateToDataStream) Pretty(pretty bool) *MigrateToDataStream { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/migratetodatastream/response.go b/typedapi/indices/migratetodatastream/response.go index e00816660a..bdd32b23e1 100644 --- a/typedapi/indices/migratetodatastream/response.go +++ b/typedapi/indices/migratetodatastream/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package migratetodatastream // Response holds the response body struct for the package migratetodatastream // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/migrate_to_data_stream/IndicesMigrateToDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/migrate_to_data_stream/IndicesMigrateToDataStreamResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/modifydatastream/modify_data_stream.go b/typedapi/indices/modifydatastream/modify_data_stream.go index 85971903dc..025a82004a 100644 --- a/typedapi/indices/modifydatastream/modify_data_stream.go +++ b/typedapi/indices/modifydatastream/modify_data_stream.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Modifies a data stream +// Performs one or more data stream modification actions in a single atomic +// operation. package modifydatastream import ( @@ -30,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -72,7 +74,8 @@ func NewModifyDataStreamFunc(tp elastictransport.Interface) NewModifyDataStream } } -// Modifies a data stream +// Performs one or more data stream modification actions in a single atomic +// operation. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams.html func New(tp elastictransport.Interface) *ModifyDataStream { @@ -291,6 +294,50 @@ func (r *ModifyDataStream) Header(key, value string) *ModifyDataStream { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ModifyDataStream) ErrorTrace(errortrace bool) *ModifyDataStream { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *ModifyDataStream) FilterPath(filterpaths ...string) *ModifyDataStream { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ModifyDataStream) Human(human bool) *ModifyDataStream { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ModifyDataStream) Pretty(pretty bool) *ModifyDataStream { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Actions Actions to perform. // API name: actions func (r *ModifyDataStream) Actions(actions ...types.IndicesModifyAction) *ModifyDataStream { diff --git a/typedapi/indices/modifydatastream/request.go b/typedapi/indices/modifydatastream/request.go index 90cdea1006..c6bb424ef4 100644 --- a/typedapi/indices/modifydatastream/request.go +++ b/typedapi/indices/modifydatastream/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package modifydatastream @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package modifydatastream // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/modify_data_stream/IndicesModifyDataStreamRequest.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/modify_data_stream/IndicesModifyDataStreamRequest.ts#L23-L36 type Request struct { // Actions Actions to perform. @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/indices/modifydatastream/response.go b/typedapi/indices/modifydatastream/response.go index 79aa9ec366..2681fb1e76 100644 --- a/typedapi/indices/modifydatastream/response.go +++ b/typedapi/indices/modifydatastream/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
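For the modify_data_stream endpoint above, a sketch of removing a backing index in one atomic call. It assumes the same typed client `es` and context `ctx` as the earlier sketch; the `types.IndicesModifyAction` and `types.IndexAndDataStreamAction` field names are assumptions based on the REST body (`remove_backing_index` with `data_stream` and `index`), and the stream and index names are hypothetical.

    // Remove one backing index from a data stream; the struct field names are
    // assumed to mirror the remove_backing_index action in the REST body.
    action := types.IndicesModifyAction{
        RemoveBackingIndex: &types.IndexAndDataStreamAction{
            DataStream: "my-data-stream",
            Index:      ".ds-my-data-stream-2024.06.01-000001",
        },
    }
    res, err := es.Indices.ModifyDataStream().Actions(action).Do(ctx)
    if err != nil {
        log.Fatalf("modify data stream failed: %s", err)
    }
    fmt.Println(res.Acknowledged)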
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package modifydatastream // Response holds the response body struct for the package modifydatastream // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/modify_data_stream/IndicesModifyDataStreamResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/modify_data_stream/IndicesModifyDataStreamResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/open/open.go b/typedapi/indices/open/open.go index f2a46281a5..c702eef849 100644 --- a/typedapi/indices/open/open.go +++ b/typedapi/indices/open/open.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Opens an index. +// Opens a closed index. +// For data streams, the API opens any closed backing indices. package open import ( @@ -27,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -78,7 +78,8 @@ func NewOpenFunc(tp elastictransport.Interface) NewOpen { } } -// Opens an index. +// Opens a closed index. +// For data streams, the API opens any closed backing indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-open-close.html func New(tp elastictransport.Interface) *Open { @@ -262,7 +263,7 @@ func (r Open) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -372,3 +373,47 @@ func (r *Open) WaitForActiveShards(waitforactiveshards string) *Open { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Open) ErrorTrace(errortrace bool) *Open { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Open) FilterPath(filterpaths ...string) *Open { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Open) Human(human bool) *Open { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
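A short sketch of the open call documented above, with the same assumed `es`/`ctx` and a hypothetical index name.

    // Open a previously closed index and wait for at least one active shard
    // copy before returning.
    res, err := es.Indices.Open("my-closed-index").
        WaitForActiveShards("1").
        Do(ctx)
    if err != nil {
        log.Fatalf("open index failed: %s", err)
    }
    fmt.Println(res.Acknowledged, res.ShardsAcknowledged)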
+// API name: pretty +func (r *Open) Pretty(pretty bool) *Open { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/open/response.go b/typedapi/indices/open/response.go index 56cc103ef5..d67d245c0d 100644 --- a/typedapi/indices/open/response.go +++ b/typedapi/indices/open/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package open // Response holds the response body struct for the package open // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/open/IndicesOpenResponse.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/open/IndicesOpenResponse.ts#L20-L25 type Response struct { Acknowledged bool `json:"acknowledged"` ShardsAcknowledged bool `json:"shards_acknowledged"` diff --git a/typedapi/indices/promotedatastream/promote_data_stream.go b/typedapi/indices/promotedatastream/promote_data_stream.go index 1609344198..50acba5e6a 100644 --- a/typedapi/indices/promotedatastream/promote_data_stream.go +++ b/typedapi/indices/promotedatastream/promote_data_stream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Promotes a data stream from a replicated data stream managed by CCR to a // regular data stream @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r PromoteDataStream) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +300,47 @@ func (r *PromoteDataStream) _name(name string) *PromoteDataStream { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PromoteDataStream) ErrorTrace(errortrace bool) *PromoteDataStream { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PromoteDataStream) FilterPath(filterpaths ...string) *PromoteDataStream { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
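The promote_data_stream endpoint in this hunk only takes the data stream name. A sketch with the same assumed `es`/`ctx` and a hypothetical stream name; per the generated `Response = json.RawMessage`, the body comes back as raw JSON.

    // Promote a CCR follower data stream to a regular, writable data stream.
    raw, err := es.Indices.PromoteDataStream("my-replicated-stream").Do(ctx)
    if err != nil {
        log.Fatalf("promote data stream failed: %s", err)
    }
    fmt.Println(string(raw)) // acknowledgement body as returned by Elasticsearch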
+// API name: human +func (r *PromoteDataStream) Human(human bool) *PromoteDataStream { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PromoteDataStream) Pretty(pretty bool) *PromoteDataStream { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/promotedatastream/response.go b/typedapi/indices/promotedatastream/response.go index 9f2f6d2ed0..0078c37907 100644 --- a/typedapi/indices/promotedatastream/response.go +++ b/typedapi/indices/promotedatastream/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package promotedatastream @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package promotedatastream // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/promote_data_stream/IndicesPromoteDataStreamResponse.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/promote_data_stream/IndicesPromoteDataStreamResponse.ts#L22-L25 type Response = json.RawMessage diff --git a/typedapi/indices/putalias/put_alias.go b/typedapi/indices/putalias/put_alias.go index c5a307e819..48981a77bc 100644 --- a/typedapi/indices/putalias/put_alias.go +++ b/typedapi/indices/putalias/put_alias.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates or updates an alias. +// Adds a data stream or index to an alias. package putalias import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -85,7 +86,7 @@ func NewPutAliasFunc(tp elastictransport.Interface) NewPutAlias { } } -// Creates or updates an alias. +// Adds a data stream or index to an alias. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html func New(tp elastictransport.Interface) *PutAlias { @@ -373,6 +374,50 @@ func (r *PutAlias) Timeout(duration string) *PutAlias { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutAlias) ErrorTrace(errortrace bool) *PutAlias { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutAlias) FilterPath(filterpaths ...string) *PutAlias { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutAlias) Human(human bool) *PutAlias { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutAlias) Pretty(pretty bool) *PutAlias { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Filter Query used to limit documents the alias can access. // API name: filter func (r *PutAlias) Filter(filter *types.Query) *PutAlias { diff --git a/typedapi/indices/putalias/request.go b/typedapi/indices/putalias/request.go index 73049dfa99..1b7a0d40fb 100644 --- a/typedapi/indices/putalias/request.go +++ b/typedapi/indices/putalias/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putalias @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putalias // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_alias/IndicesPutAliasRequest.ts#L25-L91 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_alias/IndicesPutAliasRequest.ts#L25-L91 type Request struct { // Filter Query used to limit documents the alias can access. @@ -62,6 +62,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -102,7 +103,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "is_write_index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/indices/putalias/response.go b/typedapi/indices/putalias/response.go index 0c7cda9ead..cafc536dea 100644 --- a/typedapi/indices/putalias/response.go +++ b/typedapi/indices/putalias/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putalias // Response holds the response body struct for the package putalias // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_alias/IndicesPutAliasResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_alias/IndicesPutAliasResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
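The put_alias description above ("Adds a data stream or index to an alias") pairs naturally with the migrate sketch earlier: an alias needs a write index before it can be migrated. Same assumed `es`/`ctx`; the argument order (index first, then alias name) follows the URL path and is an assumption here, as is the `IsWriteIndex` setter name, and both names are hypothetical.

    // Point the alias "my-logs-alias" at "logs-000001" and mark it as the
    // write index, one of the preconditions for migrate_to_data_stream.
    res, err := es.Indices.PutAlias("logs-000001", "my-logs-alias").
        IsWriteIndex(true).
        Do(ctx)
    if err != nil {
        log.Fatalf("put alias failed: %s", err)
    }
    fmt.Println(res.Acknowledged)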
On failure, an diff --git a/typedapi/indices/putdatalifecycle/put_data_lifecycle.go b/typedapi/indices/putdatalifecycle/put_data_lifecycle.go index 4c1da69e4b..50ce21cfee 100644 --- a/typedapi/indices/putdatalifecycle/put_data_lifecycle.go +++ b/typedapi/indices/putdatalifecycle/put_data_lifecycle.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Updates the data stream lifecycle of the selected data streams. +// Update the data lifecycle of the specified data streams. package putdatalifecycle import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -81,7 +82,7 @@ func NewPutDataLifecycleFunc(tp elastictransport.Interface) NewPutDataLifecycle } } -// Updates the data stream lifecycle of the selected data streams. +// Update the data lifecycle of the specified data streams. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/data-streams-put-lifecycle.html func New(tp elastictransport.Interface) *PutDataLifecycle { @@ -345,6 +346,50 @@ func (r *PutDataLifecycle) Timeout(duration string) *PutDataLifecycle { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutDataLifecycle) ErrorTrace(errortrace bool) *PutDataLifecycle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutDataLifecycle) FilterPath(filterpaths ...string) *PutDataLifecycle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutDataLifecycle) Human(human bool) *PutDataLifecycle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutDataLifecycle) Pretty(pretty bool) *PutDataLifecycle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // DataRetention If defined, every document added to this data stream will be stored at least // for this time frame. // Any time after this duration the document could be deleted. diff --git a/typedapi/indices/putdatalifecycle/request.go b/typedapi/indices/putdatalifecycle/request.go index 68a9b29fbd..432c75b1fa 100644 --- a/typedapi/indices/putdatalifecycle/request.go +++ b/typedapi/indices/putdatalifecycle/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
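For the put_data_lifecycle change above, a sketch that sets a retention period on matching data streams. Same assumed `es`/`ctx`; the retention value is passed as a string on the assumption that `types.Duration` accepts one, and the target pattern is hypothetical.

    // Keep documents in matching data streams for at least 7 days.
    res, err := es.Indices.PutDataLifecycle("my-data-stream*").
        DataRetention("7d").
        Do(ctx)
    if err != nil {
        log.Fatalf("put data lifecycle failed: %s", err)
    }
    fmt.Println(res.Acknowledged)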
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putdatalifecycle @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package putdatalifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_data_lifecycle/IndicesPutDataLifecycleRequest.ts#L25-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_data_lifecycle/IndicesPutDataLifecycleRequest.ts#L25-L75 type Request struct { // DataRetention If defined, every document added to this data stream will be stored at least @@ -49,6 +49,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/indices/putdatalifecycle/response.go b/typedapi/indices/putdatalifecycle/response.go index f5a5c2eb42..41137fc802 100644 --- a/typedapi/indices/putdatalifecycle/response.go +++ b/typedapi/indices/putdatalifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putdatalifecycle // Response holds the response body struct for the package putdatalifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_data_lifecycle/IndicesPutDataLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_data_lifecycle/IndicesPutDataLifecycleResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/putindextemplate/put_index_template.go b/typedapi/indices/putindextemplate/put_index_template.go index 9184a31d66..36ace679fb 100644 --- a/typedapi/indices/putindextemplate/put_index_template.go +++ b/typedapi/indices/putindextemplate/put_index_template.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates or updates an index template. +// Create or update an index template. +// Index templates define settings, mappings, and aliases that can be applied +// automatically to new indices. package putindextemplate import ( @@ -81,7 +83,9 @@ func NewPutIndexTemplateFunc(tp elastictransport.Interface) NewPutIndexTemplate } } -// Creates or updates an index template. +// Create or update an index template. +// Index templates define settings, mappings, and aliases that can be applied +// automatically to new indices. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-put-template.html func New(tp elastictransport.Interface) *PutIndexTemplate { @@ -321,6 +325,82 @@ func (r *PutIndexTemplate) Create(create bool) *PutIndexTemplate { return r } +// MasterTimeout Period to wait for a connection to the master node. +// If no response is received before the timeout expires, the request fails and +// returns an error. +// API name: master_timeout +func (r *PutIndexTemplate) MasterTimeout(duration string) *PutIndexTemplate { + r.values.Set("master_timeout", duration) + + return r +} + +// Cause User defined reason for creating/updating the index template +// API name: cause +func (r *PutIndexTemplate) Cause(cause string) *PutIndexTemplate { + r.values.Set("cause", cause) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutIndexTemplate) ErrorTrace(errortrace bool) *PutIndexTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutIndexTemplate) FilterPath(filterpaths ...string) *PutIndexTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutIndexTemplate) Human(human bool) *PutIndexTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutIndexTemplate) Pretty(pretty bool) *PutIndexTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + +// AllowAutoCreate This setting overrides the value of the `action.auto_create_index` cluster +// setting. +// If set to `true` in a template, then indices can be automatically created +// using that template even if auto-creation of indices is disabled via +// `actions.auto_create_index`. +// If set to `false`, then indices or data streams matching the template must +// always be explicitly created, and may never be automatically created. +// API name: allow_auto_create +func (r *PutIndexTemplate) AllowAutoCreate(allowautocreate bool) *PutIndexTemplate { + r.req.AllowAutoCreate = &allowautocreate + + return r +} + // ComposedOf An ordered list of component template names. // Component templates are merged in the order specified, meaning that the last // component template specified has the highest precedence. @@ -343,6 +423,27 @@ func (r *PutIndexTemplate) DataStream(datastream *types.DataStreamVisibility) *P return r } +// Deprecated Marks this index template as deprecated. When creating or updating a +// non-deprecated index template +// that uses deprecated components, Elasticsearch will emit a deprecation +// warning. 
+// API name: deprecated +func (r *PutIndexTemplate) Deprecated(deprecated bool) *PutIndexTemplate { + r.req.Deprecated = &deprecated + + return r +} + +// IgnoreMissingComponentTemplates The configuration option ignore_missing_component_templates can be used when +// an index template +// references a component template that might not exist +// API name: ignore_missing_component_templates +func (r *PutIndexTemplate) IgnoreMissingComponentTemplates(ignoremissingcomponenttemplates ...string) *PutIndexTemplate { + r.req.IgnoreMissingComponentTemplates = ignoremissingcomponenttemplates + + return r +} + // IndexPatterns Name of the index template to create. // API name: index_patterns func (r *PutIndexTemplate) IndexPatterns(indices ...string) *PutIndexTemplate { @@ -368,7 +469,8 @@ func (r *PutIndexTemplate) Meta_(metadata types.Metadata) *PutIndexTemplate { // priority 0 (lowest priority). // This number is not automatically generated by Elasticsearch. // API name: priority -func (r *PutIndexTemplate) Priority(priority int) *PutIndexTemplate { +func (r *PutIndexTemplate) Priority(priority int64) *PutIndexTemplate { + r.req.Priority = &priority return r diff --git a/typedapi/indices/putindextemplate/request.go b/typedapi/indices/putindextemplate/request.go index 422a587fb4..360ba18186 100644 --- a/typedapi/indices/putindextemplate/request.go +++ b/typedapi/indices/putindextemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putindextemplate @@ -33,9 +33,17 @@ import ( // Request holds the request body struct for the package putindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_index_template/IndicesPutIndexTemplateRequest.ts#L36-L95 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_index_template/IndicesPutIndexTemplateRequest.ts#L37-L119 type Request struct { + // AllowAutoCreate This setting overrides the value of the `action.auto_create_index` cluster + // setting. + // If set to `true` in a template, then indices can be automatically created + // using that template even if auto-creation of indices is disabled via + // `actions.auto_create_index`. + // If set to `false`, then indices or data streams matching the template must + // always be explicitly created, and may never be automatically created. + AllowAutoCreate *bool `json:"allow_auto_create,omitempty"` // ComposedOf An ordered list of component template names. // Component templates are merged in the order specified, meaning that the last // component template specified has the highest precedence. @@ -45,6 +53,15 @@ type Request struct { // Supports an empty object. // Data streams require a matching index template with a `data_stream` object. DataStream *types.DataStreamVisibility `json:"data_stream,omitempty"` + // Deprecated Marks this index template as deprecated. When creating or updating a + // non-deprecated index template + // that uses deprecated components, Elasticsearch will emit a deprecation + // warning. 
+ Deprecated *bool `json:"deprecated,omitempty"` + // IgnoreMissingComponentTemplates The configuration option ignore_missing_component_templates can be used when + // an index template + // references a component template that might not exist + IgnoreMissingComponentTemplates []string `json:"ignore_missing_component_templates,omitempty"` // IndexPatterns Name of the index template to create. IndexPatterns []string `json:"index_patterns,omitempty"` // Meta_ Optional user metadata about the index template. @@ -57,7 +74,7 @@ type Request struct { // If no priority is specified the template is treated as though it is of // priority 0 (lowest priority). // This number is not automatically generated by Elasticsearch. - Priority *int `json:"priority,omitempty"` + Priority *int64 `json:"priority,omitempty"` // Template Template to be applied. // It may optionally include an `aliases`, `mappings`, or `settings` // configuration. @@ -70,6 +87,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -99,6 +117,20 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { + case "allow_auto_create": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "AllowAutoCreate", err) + } + s.AllowAutoCreate = &value + case bool: + s.AllowAutoCreate = &v + } + case "composed_of": if err := dec.Decode(&s.ComposedOf); err != nil { return fmt.Errorf("%s | %w", "ComposedOf", err) @@ -109,6 +141,25 @@ func (s *Request) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "DataStream", err) } + case "deprecated": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Deprecated", err) + } + s.Deprecated = &value + case bool: + s.Deprecated = &v + } + + case "ignore_missing_component_templates": + if err := dec.Decode(&s.IgnoreMissingComponentTemplates); err != nil { + return fmt.Errorf("%s | %w", "IgnoreMissingComponentTemplates", err) + } + case "index_patterns": rawMsg := json.RawMessage{} dec.Decode(&rawMsg) @@ -131,18 +182,17 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "priority": - - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { return fmt.Errorf("%s | %w", "Priority", err) } s.Priority = &value case float64: - f := int(v) + f := int64(v) s.Priority = &f } diff --git a/typedapi/indices/putindextemplate/response.go b/typedapi/indices/putindextemplate/response.go index a001927293..438521fc5f 100644 --- a/typedapi/indices/putindextemplate/response.go +++ b/typedapi/indices/putindextemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
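The put_index_template hunks above add `allow_auto_create`, `deprecated`, and `ignore_missing_component_templates` to the body, add the `cause` and `master_timeout` query parameters, and widen `priority` to int64. A sketch exercising them, assuming `es`/`ctx` as before plus the `types` package (github.com/elastic/go-elasticsearch/v8/typedapi/types); the template, pattern, and component names are hypothetical.

    // Create a data-stream-enabled index template using the new body and
    // query parameters from this change.
    res, err := es.Indices.PutIndexTemplate("logs-template").
        IndexPatterns("logs-*").
        DataStream(&types.DataStreamVisibility{}).
        Priority(200).                                  // now int64
        AllowAutoCreate(true).                          // new body field
        IgnoreMissingComponentTemplates("logs@custom"). // new body field
        Cause("initial rollout of logs template").      // new query parameter
        MasterTimeout("30s").
        Do(ctx)
    if err != nil {
        log.Fatalf("put index template failed: %s", err)
    }
    fmt.Println(res.Acknowledged)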
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putindextemplate // Response holds the response body struct for the package putindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_index_template/IndicesPutIndexTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_index_template/IndicesPutIndexTemplateResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/putmapping/put_mapping.go b/typedapi/indices/putmapping/put_mapping.go index 5669fa36e8..4a7b9472d6 100644 --- a/typedapi/indices/putmapping/put_mapping.go +++ b/typedapi/indices/putmapping/put_mapping.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Updates the index mappings. +// Adds new fields to an existing data stream or index. +// You can also use this API to change the search settings of existing fields. +// For data streams, these changes are applied to all backing indices by +// default. package putmapping import ( @@ -83,7 +86,10 @@ func NewPutMappingFunc(tp elastictransport.Interface) NewPutMapping { } } -// Updates the index mappings. +// Adds new fields to an existing data stream or index. +// You can also use this API to change the search settings of existing fields. +// For data streams, these changes are applied to all backing indices by +// default. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-put-mapping.html func New(tp elastictransport.Interface) *PutMapping { @@ -380,6 +386,50 @@ func (r *PutMapping) WriteIndexOnly(writeindexonly bool) *PutMapping { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutMapping) ErrorTrace(errortrace bool) *PutMapping { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutMapping) FilterPath(filterpaths ...string) *PutMapping { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutMapping) Human(human bool) *PutMapping { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
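The put_mapping description above, together with the request decoding below (which now recognizes `semantic_text` and `icu_collation_keyword` properties), can be exercised as in this sketch. Same assumed `es`/`ctx` and `types` package; the `Properties` setter name and the field/index names are assumptions, and a real `semantic_text` field typically also needs an inference endpoint configured.

    // Add a semantic_text field to an existing index; semantic_text decoding
    // is newly handled in this change.
    res, err := es.Indices.PutMapping("my-index").
        Properties(map[string]types.Property{
            "summary": types.NewSemanticTextProperty(),
        }).
        Do(ctx)
    if err != nil {
        log.Fatalf("put mapping failed: %s", err)
    }
    fmt.Println(res.Acknowledged)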
+// API name: pretty +func (r *PutMapping) Pretty(pretty bool) *PutMapping { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // DateDetection Controls whether dynamic date detection is enabled. // API name: date_detection func (r *PutMapping) DateDetection(datedetection bool) *PutMapping { diff --git a/typedapi/indices/putmapping/request.go b/typedapi/indices/putmapping/request.go index 658e875b9e..c9261d7b47 100644 --- a/typedapi/indices/putmapping/request.go +++ b/typedapi/indices/putmapping/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putmapping @@ -34,7 +34,7 @@ import ( // Request holds the request body struct for the package putmapping // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_mapping/IndicesPutMappingRequest.ts#L42-L149 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_mapping/IndicesPutMappingRequest.ts#L42-L149 type Request struct { // DateDetection Controls whether dynamic date detection is enabled. @@ -74,6 +74,7 @@ func NewRequest() *Request { r := &Request{ Properties: make(map[string]types.Property, 0), } + return r } @@ -104,7 +105,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "date_detection": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -159,7 +160,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "numeric_detection": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -179,7 +180,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -200,7 +201,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := types.NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -290,12 +291,6 @@ func (s *Request) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := types.NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := types.NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -314,6 +309,18 @@ func (s *Request) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := types.NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := types.NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := types.NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -470,6 +477,12 @@ func (s *Request) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := 
types.NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(types.Property) if err := localDec.Decode(&oo); err != nil { diff --git a/typedapi/indices/putmapping/response.go b/typedapi/indices/putmapping/response.go index 2059ed6e70..86286811b8 100644 --- a/typedapi/indices/putmapping/response.go +++ b/typedapi/indices/putmapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putmapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putmapping // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_mapping/IndicesPutMappingResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_mapping/IndicesPutMappingResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/putsettings/put_settings.go b/typedapi/indices/putsettings/put_settings.go index 8522cc09d9..b1fb3d4637 100644 --- a/typedapi/indices/putsettings/put_settings.go +++ b/typedapi/indices/putsettings/put_settings.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Updates the index settings. +// Changes a dynamic index setting in real time. For data streams, index setting +// changes are applied to all backing indices by default. package putsettings import ( @@ -81,7 +82,8 @@ func NewPutSettingsFunc(tp elastictransport.Interface) NewPutSettings { } } -// Updates the index settings. +// Changes a dynamic index setting in real time. For data streams, index setting +// changes are applied to all backing indices by default. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-update-settings.html func New(tp elastictransport.Interface) *PutSettings { @@ -91,6 +93,8 @@ func New(tp elastictransport.Interface) *PutSettings { headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -390,6 +394,50 @@ func (r *PutSettings) Timeout(duration string) *PutSettings { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutSettings) ErrorTrace(errortrace bool) *PutSettings { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *PutSettings) FilterPath(filterpaths ...string) *PutSettings { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutSettings) Human(human bool) *PutSettings { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutSettings) Pretty(pretty bool) *PutSettings { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: analysis func (r *PutSettings) Analysis(analysis *types.IndexSettingsAnalysis) *PutSettings { diff --git a/typedapi/indices/putsettings/request.go b/typedapi/indices/putsettings/request.go index a68f27c9c7..9efd2b9e24 100644 --- a/typedapi/indices/putsettings/request.go +++ b/typedapi/indices/putsettings/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putsettings @@ -26,5 +26,12 @@ import ( // Request holds the request body struct for the package putsettings // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_settings/IndicesPutSettingsRequest.ts#L25-L92 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_settings/IndicesPutSettingsRequest.ts#L25-L92 type Request = types.IndexSettings + +// NewRequest returns a Request +func NewRequest() *Request { + r := types.NewIndexSettings() + + return r +} diff --git a/typedapi/indices/putsettings/response.go b/typedapi/indices/putsettings/response.go index fcbf3c5643..3210386346 100644 --- a/typedapi/indices/putsettings/response.go +++ b/typedapi/indices/putsettings/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putsettings // Response holds the response body struct for the package putsettings // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_settings/IndicesPutSettingsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_settings/IndicesPutSettingsResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/indices/puttemplate/put_template.go b/typedapi/indices/puttemplate/put_template.go index e0cb5cac26..92aa3c12e5 100644 --- a/typedapi/indices/puttemplate/put_template.go +++ b/typedapi/indices/puttemplate/put_template.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates or updates an index template. +// Create or update an index template. +// Index templates define settings, mappings, and aliases that can be applied +// automatically to new indices. package puttemplate import ( @@ -81,7 +83,9 @@ func NewPutTemplateFunc(tp elastictransport.Interface) NewPutTemplate { } } -// Creates or updates an index template. +// Create or update an index template. +// Index templates define settings, mappings, and aliases that can be applied +// automatically to new indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates-v1.html func New(tp elastictransport.Interface) *PutTemplate { @@ -321,14 +325,6 @@ func (r *PutTemplate) Create(create bool) *PutTemplate { return r } -// FlatSettings If `true`, returns settings in flat format. -// API name: flat_settings -func (r *PutTemplate) FlatSettings(flatsettings bool) *PutTemplate { - r.values.Set("flat_settings", strconv.FormatBool(flatsettings)) - - return r -} - // MasterTimeout Period to wait for a connection to the master node. If no response is // received before the timeout expires, the request fails and returns an error. // API name: master_timeout @@ -338,12 +334,53 @@ func (r *PutTemplate) MasterTimeout(duration string) *PutTemplate { return r } -// Timeout Period to wait for a response. -// If no response is received before the timeout expires, the request fails and -// returns an error. -// API name: timeout -func (r *PutTemplate) Timeout(duration string) *PutTemplate { - r.values.Set("timeout", duration) +// API name: cause +func (r *PutTemplate) Cause(cause string) *PutTemplate { + r.values.Set("cause", cause) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutTemplate) ErrorTrace(errortrace bool) *PutTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutTemplate) FilterPath(filterpaths ...string) *PutTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutTemplate) Human(human bool) *PutTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *PutTemplate) Pretty(pretty bool) *PutTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) return r } @@ -389,7 +426,7 @@ func (r *PutTemplate) Order(order int) *PutTemplate { // Settings Configuration options for the index. // API name: settings -func (r *PutTemplate) Settings(settings map[string]json.RawMessage) *PutTemplate { +func (r *PutTemplate) Settings(settings *types.IndexSettings) *PutTemplate { r.req.Settings = settings diff --git a/typedapi/indices/puttemplate/request.go b/typedapi/indices/puttemplate/request.go index 2392d40026..10abad9367 100644 --- a/typedapi/indices/puttemplate/request.go +++ b/typedapi/indices/puttemplate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package puttemplate @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package puttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_template/IndicesPutTemplateRequest.ts#L29-L105 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_template/IndicesPutTemplateRequest.ts#L29-L95 type Request struct { // Aliases Aliases for the index. @@ -50,7 +50,7 @@ type Request struct { // 'order' values are merged later, overriding templates with lower values. Order *int `json:"order,omitempty"` // Settings Configuration options for the index. - Settings map[string]json.RawMessage `json:"settings,omitempty"` + Settings *types.IndexSettings `json:"settings,omitempty"` // Version Version number used to manage index templates externally. This number // is not automatically generated by Elasticsearch. Version *int64 `json:"version,omitempty"` @@ -59,9 +59,9 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{ - Aliases: make(map[string]types.Alias, 0), - Settings: make(map[string]json.RawMessage, 0), + Aliases: make(map[string]types.Alias, 0), } + return r } @@ -122,7 +122,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "order": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -137,9 +137,6 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "settings": - if s.Settings == nil { - s.Settings = make(map[string]json.RawMessage, 0) - } if err := dec.Decode(&s.Settings); err != nil { return fmt.Errorf("%s | %w", "Settings", err) } diff --git a/typedapi/indices/puttemplate/response.go b/typedapi/indices/puttemplate/response.go index 22ed0bf5f8..b49997710a 100644 --- a/typedapi/indices/puttemplate/response.go +++ b/typedapi/indices/puttemplate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
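In the legacy put_template hunk above, `Settings` now takes a typed `*types.IndexSettings` instead of raw JSON, and `flat_settings`/`timeout` give way to `cause` plus the common parameters. A sketch, assuming `es`/`ctx` and the `types` package as before; the `IndexPatterns` setter name and the template/pattern names are assumptions.

    // Create a legacy (v1) template with typed settings.
    settings := types.NewIndexSettings() // constructor referenced in this change
    res, err := es.Indices.PutTemplate("legacy-logs").
        IndexPatterns("logs-legacy-*").
        Order(1).
        Settings(settings). // *types.IndexSettings rather than map[string]json.RawMessage
        Cause("migrating away from raw settings").
        Do(ctx)
    if err != nil {
        log.Fatalf("put template failed: %s", err)
    }
    fmt.Println(res.Acknowledged)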
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package puttemplate // Response holds the response body struct for the package puttemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_template/IndicesPutTemplateResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_template/IndicesPutTemplateResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/recovery/recovery.go b/typedapi/indices/recovery/recovery.go index 7e00ab1973..9dff1573a4 100644 --- a/typedapi/indices/recovery/recovery.go +++ b/typedapi/indices/recovery/recovery.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about ongoing index shard recoveries. +// Returns information about ongoing and completed shard recoveries for one or +// more indices. +// For data streams, the API returns information for the stream’s backing +// indices. package recovery import ( @@ -27,7 +30,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -75,7 +77,10 @@ func NewRecoveryFunc(tp elastictransport.Interface) NewRecovery { } } -// Returns information about ongoing index shard recoveries. +// Returns information about ongoing and completed shard recoveries for one or +// more indices. +// For data streams, the API returns information for the stream’s backing +// indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-recovery.html func New(tp elastictransport.Interface) *Recovery { @@ -264,7 +269,7 @@ func (r Recovery) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -320,3 +325,47 @@ func (r *Recovery) Detailed(detailed bool) *Recovery { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Recovery) ErrorTrace(errortrace bool) *Recovery { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Recovery) FilterPath(filterpaths ...string) *Recovery { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
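For the recovery endpoint above, a sketch that requests detailed recovery information; `es`/`ctx` as before, the target pattern is hypothetical, and the `Index` setter is assumed to select the optional index path parameter. The generated response is a map keyed by index name.

    // Fetch ongoing and completed shard recoveries, including file details.
    res, err := es.Indices.Recovery().
        Index("my-data-stream").
        Detailed(true).
        Do(ctx)
    if err != nil {
        log.Fatalf("recovery failed: %s", err)
    }
    _ = res // map of index name to types.RecoveryStatus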
+// API name: human +func (r *Recovery) Human(human bool) *Recovery { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Recovery) Pretty(pretty bool) *Recovery { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/recovery/response.go b/typedapi/indices/recovery/response.go index 30be957190..164036b3bd 100644 --- a/typedapi/indices/recovery/response.go +++ b/typedapi/indices/recovery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package recovery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package recovery // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/recovery/IndicesRecoveryResponse.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/recovery/IndicesRecoveryResponse.ts#L24-L27 type Response map[string]types.RecoveryStatus diff --git a/typedapi/indices/refresh/refresh.go b/typedapi/indices/refresh/refresh.go index e2ea8e3486..f5b7dbbfec 100644 --- a/typedapi/indices/refresh/refresh.go +++ b/typedapi/indices/refresh/refresh.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Performs the refresh operation in one or more indices. +// A refresh makes recent operations performed on one or more indices available +// for search. +// For data streams, the API runs the refresh operation on the stream’s backing +// indices. package refresh import ( @@ -27,7 +30,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,7 +78,10 @@ func NewRefreshFunc(tp elastictransport.Interface) NewRefresh { } } -// Performs the refresh operation in one or more indices. +// A refresh makes recent operations performed on one or more indices available +// for search. +// For data streams, the API runs the refresh operation on the stream’s backing +// indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-refresh.html func New(tp elastictransport.Interface) *Refresh { @@ -265,7 +270,7 @@ func (r Refresh) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -340,3 +345,47 @@ func (r *Refresh) IgnoreUnavailable(ignoreunavailable bool) *Refresh { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
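A matching sketch for the refresh endpoint above, with the same assumed `es`/`ctx`; the target name is hypothetical and the `Index` setter is again assumed.

    // Make recent writes to the data stream's backing indices searchable.
    res, err := es.Indices.Refresh().
        Index("my-data-stream").
        IgnoreUnavailable(true).
        Do(ctx)
    if err != nil {
        log.Fatalf("refresh failed: %s", err)
    }
    fmt.Printf("%+v\n", res.Shards_) // shard statistics for the refresh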
+// API name: error_trace +func (r *Refresh) ErrorTrace(errortrace bool) *Refresh { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Refresh) FilterPath(filterpaths ...string) *Refresh { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Refresh) Human(human bool) *Refresh { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Refresh) Pretty(pretty bool) *Refresh { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/refresh/response.go b/typedapi/indices/refresh/response.go index b10f42570b..22ecae3267 100644 --- a/typedapi/indices/refresh/response.go +++ b/typedapi/indices/refresh/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package refresh @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package refresh // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/refresh/IndicesRefreshResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/refresh/IndicesRefreshResponse.ts#L22-L24 type Response struct { Shards_ types.ShardStatistics `json:"_shards"` } diff --git a/typedapi/indices/reloadsearchanalyzers/reload_search_analyzers.go b/typedapi/indices/reloadsearchanalyzers/reload_search_analyzers.go index 1a0ae3807f..f26fb777ea 100644 --- a/typedapi/indices/reloadsearchanalyzers/reload_search_analyzers.go +++ b/typedapi/indices/reloadsearchanalyzers/reload_search_analyzers.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Reloads an index's search analyzers and their resources. 
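// Illustrative usage (not part of the generated diff): Refresh makes recent
// writes visible to search; for a data stream it refreshes the backing indices.
// A sketch reusing the `es` typed client and imports from the earlier example;
// the target name is made up.
func refreshExample(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Indices.Refresh().
		Index("my-data-stream").
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("refresh touched %d shards\n", res.Shards_.Total)
	return nil
}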
package reloadsearchanalyzers @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -262,7 +261,7 @@ func (r ReloadSearchAnalyzers) IsSuccess(providedCtx context.Context) (bool, err if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -329,3 +328,47 @@ func (r *ReloadSearchAnalyzers) IgnoreUnavailable(ignoreunavailable bool) *Reloa return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ReloadSearchAnalyzers) ErrorTrace(errortrace bool) *ReloadSearchAnalyzers { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ReloadSearchAnalyzers) FilterPath(filterpaths ...string) *ReloadSearchAnalyzers { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ReloadSearchAnalyzers) Human(human bool) *ReloadSearchAnalyzers { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ReloadSearchAnalyzers) Pretty(pretty bool) *ReloadSearchAnalyzers { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/reloadsearchanalyzers/response.go b/typedapi/indices/reloadsearchanalyzers/response.go index 5bd392d552..5f6781cab2 100644 --- a/typedapi/indices/reloadsearchanalyzers/response.go +++ b/typedapi/indices/reloadsearchanalyzers/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package reloadsearchanalyzers @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package reloadsearchanalyzers // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/reload_search_analyzers/ReloadSearchAnalyzersResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/reload_search_analyzers/ReloadSearchAnalyzersResponse.ts#L22-L24 type Response struct { ReloadDetails []types.ReloadDetails `json:"reload_details"` Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/indices/resolvecluster/resolve_cluster.go b/typedapi/indices/resolvecluster/resolve_cluster.go index 7b9ccd1eb1..b70db34d6f 100644 --- a/typedapi/indices/resolvecluster/resolve_cluster.go +++ b/typedapi/indices/resolvecluster/resolve_cluster.go @@ -16,10 +16,12 @@ // under the License. 
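// Illustrative usage (not part of the generated diff): ReloadSearchAnalyzers
// reloads an index's search-time analyzers, for example after a synonyms file
// changes. IsSuccess, shown in the hunk above, is handy when only a boolean
// outcome is needed. Sketch reusing the `es` client; the index name is made up.
func reloadAnalyzersExample(ctx context.Context, es *elasticsearch.TypedClient) (bool, error) {
	return es.Indices.ReloadSearchAnalyzers("my-index").IsSuccess(ctx)
}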
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Resolves the specified index expressions to return information about each -// cluster, including the local cluster, if included. +// cluster, including +// the local cluster, if included. +// Multiple patterns and remote clusters are supported. package resolvecluster import ( @@ -28,7 +30,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -80,7 +81,9 @@ func NewResolveClusterFunc(tp elastictransport.Interface) NewResolveCluster { } // Resolves the specified index expressions to return information about each -// cluster, including the local cluster, if included. +// cluster, including +// the local cluster, if included. +// Multiple patterns and remote clusters are supported. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-resolve-cluster-api.html func New(tp elastictransport.Interface) *ResolveCluster { @@ -266,7 +269,7 @@ func (r ResolveCluster) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -352,3 +355,47 @@ func (r *ResolveCluster) IgnoreUnavailable(ignoreunavailable bool) *ResolveClust return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ResolveCluster) ErrorTrace(errortrace bool) *ResolveCluster { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ResolveCluster) FilterPath(filterpaths ...string) *ResolveCluster { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ResolveCluster) Human(human bool) *ResolveCluster { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ResolveCluster) Pretty(pretty bool) *ResolveCluster { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/resolvecluster/response.go b/typedapi/indices/resolvecluster/response.go index 561258698e..226e7911e7 100644 --- a/typedapi/indices/resolvecluster/response.go +++ b/typedapi/indices/resolvecluster/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
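// Illustrative usage (not part of the generated diff): ResolveCluster reports,
// for each cluster referenced by the expression (including remote clusters),
// whether it is connected and whether the expression matches any indices.
// Sketch reusing the `es` client; the remote alias "eu-west" is made up and the
// expression is passed as the endpoint's required name argument.
func resolveClusterExample(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Indices.ResolveCluster("logs-*,eu-west:logs-*").Do(ctx)
	if err != nil {
		return err
	}
	for cluster, info := range res {
		fmt.Printf("cluster %q connected=%v\n", cluster, info.Connected)
	}
	return nil
}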
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package resolvecluster @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package resolvecluster // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/resolve_cluster/ResolveClusterResponse.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/resolve_cluster/ResolveClusterResponse.ts#L24-L27 type Response map[string]types.ResolveClusterInfo diff --git a/typedapi/indices/resolveindex/resolve_index.go b/typedapi/indices/resolveindex/resolve_index.go index 15f6508b8c..e91d98748a 100644 --- a/typedapi/indices/resolveindex/resolve_index.go +++ b/typedapi/indices/resolveindex/resolve_index.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about any matching indices, aliases, and data streams +// Resolves the specified name(s) and/or index patterns for indices, aliases, +// and data streams. +// Multiple patterns and remote clusters are supported. package resolveindex import ( @@ -27,9 +29,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,7 +79,9 @@ func NewResolveIndexFunc(tp elastictransport.Interface) NewResolveIndex { } } -// Returns information about any matching indices, aliases, and data streams +// Resolves the specified name(s) and/or index patterns for indices, aliases, +// and data streams. +// Multiple patterns and remote clusters are supported. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-resolve-index-api.html func New(tp elastictransport.Interface) *ResolveIndex { @@ -263,7 +267,7 @@ func (r ResolveIndex) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -318,3 +322,47 @@ func (r *ResolveIndex) ExpandWildcards(expandwildcards ...expandwildcard.ExpandW return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ResolveIndex) ErrorTrace(errortrace bool) *ResolveIndex { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ResolveIndex) FilterPath(filterpaths ...string) *ResolveIndex { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. 
This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ResolveIndex) Human(human bool) *ResolveIndex { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ResolveIndex) Pretty(pretty bool) *ResolveIndex { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/resolveindex/response.go b/typedapi/indices/resolveindex/response.go index a8ee85a966..821c579979 100644 --- a/typedapi/indices/resolveindex/response.go +++ b/typedapi/indices/resolveindex/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package resolveindex @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package resolveindex // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/resolve_index/ResolveIndexResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/resolve_index/ResolveIndexResponse.ts#L22-L28 type Response struct { Aliases []types.ResolveIndexAliasItem `json:"aliases"` DataStreams []types.ResolveIndexDataStreamsItem `json:"data_streams"` diff --git a/typedapi/indices/rollover/request.go b/typedapi/indices/rollover/request.go index 6c2e073361..87b45312dc 100644 --- a/typedapi/indices/rollover/request.go +++ b/typedapi/indices/rollover/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package rollover @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package rollover // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/rollover/IndicesRolloverRequest.ts#L29-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/rollover/IndicesRolloverRequest.ts#L29-L99 type Request struct { // Aliases Aliases for the target index. @@ -60,6 +60,7 @@ func NewRequest() *Request { Aliases: make(map[string]types.Alias, 0), Settings: make(map[string]json.RawMessage, 0), } + return r } diff --git a/typedapi/indices/rollover/response.go b/typedapi/indices/rollover/response.go index 5e8a4dff1a..c114c6555a 100644 --- a/typedapi/indices/rollover/response.go +++ b/typedapi/indices/rollover/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
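// Illustrative usage (not part of the generated diff): ResolveIndex expands a
// name or wildcard pattern into the matching indices, aliases and data streams.
// Sketch reusing the `es` client from the first example.
func resolveIndexExample(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Indices.ResolveIndex("logs-*").Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("indices=%d aliases=%d data_streams=%d\n",
		len(res.Indices), len(res.Aliases), len(res.DataStreams))
	return nil
}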
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package rollover // Response holds the response body struct for the package rollover // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/rollover/IndicesRolloverResponse.ts#L22-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/rollover/IndicesRolloverResponse.ts#L22-L32 type Response struct { Acknowledged bool `json:"acknowledged"` Conditions map[string]bool `json:"conditions"` diff --git a/typedapi/indices/rollover/rollover.go b/typedapi/indices/rollover/rollover.go index 293ac671a3..ecad8b8c4f 100644 --- a/typedapi/indices/rollover/rollover.go +++ b/typedapi/indices/rollover/rollover.go @@ -16,10 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Updates an alias to point to a new index when the existing index -// is considered to be too large or too old. +// Creates a new index for a data stream or index alias. package rollover import ( @@ -85,8 +84,7 @@ func NewRolloverFunc(tp elastictransport.Interface) NewRollover { } } -// Updates an alias to point to a new index when the existing index -// is considered to be too large or too old. +// Creates a new index for a data stream or index alias. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-rollover-index.html func New(tp elastictransport.Interface) *Rollover { @@ -386,6 +384,50 @@ func (r *Rollover) WaitForActiveShards(waitforactiveshards string) *Rollover { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Rollover) ErrorTrace(errortrace bool) *Rollover { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Rollover) FilterPath(filterpaths ...string) *Rollover { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Rollover) Human(human bool) *Rollover { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Rollover) Pretty(pretty bool) *Rollover { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Aliases Aliases for the target index. // Data streams do not support this parameter. 
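// Illustrative usage (not part of the generated diff): per the reworded summary
// above, Rollover creates a new write index for a data stream or index alias.
// Sketch reusing the `es` client; the alias name is made up and rollover
// conditions are omitted, so the rollover happens unconditionally.
func rolloverExample(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Indices.Rollover("my-write-alias").Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("rolled over %s -> %s (acknowledged=%v)\n",
		res.OldIndex, res.NewIndex, res.Acknowledged)
	return nil
}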
// API name: aliases diff --git a/typedapi/indices/segments/response.go b/typedapi/indices/segments/response.go index ee9a10a4f1..38418d9d0b 100644 --- a/typedapi/indices/segments/response.go +++ b/typedapi/indices/segments/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package segments @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package segments // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/segments/IndicesSegmentsResponse.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/segments/IndicesSegmentsResponse.ts#L24-L29 type Response struct { Indices map[string]types.IndexSegment `json:"indices"` Shards_ types.ShardStatistics `json:"_shards"` diff --git a/typedapi/indices/segments/segments.go b/typedapi/indices/segments/segments.go index 10e36c842d..4578fe4d21 100644 --- a/typedapi/indices/segments/segments.go +++ b/typedapi/indices/segments/segments.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Provides low-level information about segments in a Lucene index. +// Returns low-level information about the Lucene segments in index shards. +// For data streams, the API returns information about the stream’s backing +// indices. package segments import ( @@ -27,7 +29,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,7 +77,9 @@ func NewSegmentsFunc(tp elastictransport.Interface) NewSegments { } } -// Provides low-level information about segments in a Lucene index. +// Returns low-level information about the Lucene segments in index shards. +// For data streams, the API returns information about the stream’s backing +// indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-segments.html func New(tp elastictransport.Interface) *Segments { @@ -265,7 +268,7 @@ func (r Segments) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -348,3 +351,47 @@ func (r *Segments) Verbose(verbose bool) *Segments { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Segments) ErrorTrace(errortrace bool) *Segments { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
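// Illustrative usage (not part of the generated diff): Segments exposes
// low-level Lucene segment details per shard, which helps when investigating
// merging or on-disk size. Sketch reusing the `es` client; the index name is
// made up.
func segmentsExample(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Indices.Segments().Index("my-index").Human(true).Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("segment details for %d indices\n", len(res.Indices))
	return nil
}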
+// API name: filter_path +func (r *Segments) FilterPath(filterpaths ...string) *Segments { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Segments) Human(human bool) *Segments { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Segments) Pretty(pretty bool) *Segments { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/shardstores/response.go b/typedapi/indices/shardstores/response.go index ec1945093a..688fb9c68c 100644 --- a/typedapi/indices/shardstores/response.go +++ b/typedapi/indices/shardstores/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package shardstores @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package shardstores // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/shard_stores/IndicesShardStoresResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/shard_stores/IndicesShardStoresResponse.ts#L24-L26 type Response struct { Indices map[string]types.IndicesShardStores `json:"indices"` } diff --git a/typedapi/indices/shardstores/shard_stores.go b/typedapi/indices/shardstores/shard_stores.go index 3930cae6af..d1e7462b9a 100644 --- a/typedapi/indices/shardstores/shard_stores.go +++ b/typedapi/indices/shardstores/shard_stores.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Provides store information for shard copies of indices. +// Retrieves store information about replica shards in one or more indices. +// For data streams, the API retrieves store information for the stream’s +// backing indices. package shardstores import ( @@ -27,7 +29,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -77,7 +78,9 @@ func NewShardStoresFunc(tp elastictransport.Interface) NewShardStores { } } -// Provides store information for shard copies of indices. +// Retrieves store information about replica shards in one or more indices. +// For data streams, the API retrieves store information for the stream’s +// backing indices. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-shards-stores.html func New(tp elastictransport.Interface) *ShardStores { @@ -266,7 +269,7 @@ func (r ShardStores) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -349,3 +352,47 @@ func (r *ShardStores) Status(statuses ...shardstorestatus.ShardStoreStatus) *Sha return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ShardStores) ErrorTrace(errortrace bool) *ShardStores { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ShardStores) FilterPath(filterpaths ...string) *ShardStores { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ShardStores) Human(human bool) *ShardStores { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ShardStores) Pretty(pretty bool) *ShardStores { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/shrink/request.go b/typedapi/indices/shrink/request.go index 19e9b3a8f3..8565783e37 100644 --- a/typedapi/indices/shrink/request.go +++ b/typedapi/indices/shrink/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package shrink @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package shrink // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/shrink/IndicesShrinkRequest.ts#L27-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/shrink/IndicesShrinkRequest.ts#L27-L75 type Request struct { // Aliases The key is the alias name. @@ -45,6 +45,7 @@ func NewRequest() *Request { Aliases: make(map[string]types.Alias, 0), Settings: make(map[string]json.RawMessage, 0), } + return r } diff --git a/typedapi/indices/shrink/response.go b/typedapi/indices/shrink/response.go index f07d273cef..e8b5562232 100644 --- a/typedapi/indices/shrink/response.go +++ b/typedapi/indices/shrink/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
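// Illustrative usage (not part of the generated diff): ShardStores shows which
// nodes hold copies of each shard, which is useful when diagnosing unassigned
// or corrupted shards. Sketch reusing the `es` client; the index name is made up.
func shardStoresExample(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Indices.ShardStores().Index("my-index").Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("store information for %d indices\n", len(res.Indices))
	return nil
}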
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package shrink // Response holds the response body struct for the package shrink // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/shrink/IndicesShrinkResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/shrink/IndicesShrinkResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` Index string `json:"index"` diff --git a/typedapi/indices/shrink/shrink.go b/typedapi/indices/shrink/shrink.go index 7f56872ed0..ad3594142a 100644 --- a/typedapi/indices/shrink/shrink.go +++ b/typedapi/indices/shrink/shrink.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Allow to shrink an existing index into a new index with fewer primary shards. +// Shrinks an existing index into a new index with fewer primary shards. package shrink import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -85,7 +86,7 @@ func NewShrinkFunc(tp elastictransport.Interface) NewShrink { } } -// Allow to shrink an existing index into a new index with fewer primary shards. +// Shrinks an existing index into a new index with fewer primary shards. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-shrink-index.html func New(tp elastictransport.Interface) *Shrink { @@ -363,6 +364,50 @@ func (r *Shrink) WaitForActiveShards(waitforactiveshards string) *Shrink { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Shrink) ErrorTrace(errortrace bool) *Shrink { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Shrink) FilterPath(filterpaths ...string) *Shrink { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Shrink) Human(human bool) *Shrink { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Shrink) Pretty(pretty bool) *Shrink { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Aliases The key is the alias name. // Index alias names support date math. 
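// Illustrative usage (not part of the generated diff): Shrink copies a
// read-only source index into a new index with fewer primary shards. Sketch
// reusing the `es` client; index names and the shard count are made up, and the
// Settings setter is assumed to take the raw-JSON map declared on the generated
// Request struct above (encoding/json must be imported).
func shrinkExample(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Indices.Shrink("my-index", "my-index-shrunk").
		Settings(map[string]json.RawMessage{
			"index.number_of_shards": json.RawMessage(`1`),
		}).
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("shrunk into %s (acknowledged=%v)\n", res.Index, res.Acknowledged)
	return nil
}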
// API name: aliases diff --git a/typedapi/indices/simulateindextemplate/request.go b/typedapi/indices/simulateindextemplate/request.go deleted file mode 100644 index 2e5699e910..0000000000 --- a/typedapi/indices/simulateindextemplate/request.go +++ /dev/null @@ -1,185 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -package simulateindextemplate - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "io" - "strconv" - - "github.com/elastic/go-elasticsearch/v8/typedapi/types" -) - -// Request holds the request body struct for the package simulateindextemplate -// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/simulate_index_template/IndicesSimulateIndexTemplateRequest.ts#L33-L115 -type Request struct { - - // AllowAutoCreate This setting overrides the value of the `action.auto_create_index` cluster - // setting. - // If set to `true` in a template, then indices can be automatically created - // using that template even if auto-creation of indices is disabled via - // `actions.auto_create_index`. - // If set to `false`, then indices or data streams matching the template must - // always be explicitly created, and may never be automatically created. - AllowAutoCreate *bool `json:"allow_auto_create,omitempty"` - // ComposedOf An ordered list of component template names. - // Component templates are merged in the order specified, meaning that the last - // component template specified has the highest precedence. - ComposedOf []string `json:"composed_of,omitempty"` - // DataStream If this object is included, the template is used to create data streams and - // their backing indices. - // Supports an empty object. - // Data streams require a matching index template with a `data_stream` object. - DataStream *types.DataStreamVisibility `json:"data_stream,omitempty"` - // IndexPatterns Array of wildcard (`*`) expressions used to match the names of data streams - // and indices during creation. - IndexPatterns []string `json:"index_patterns,omitempty"` - // Meta_ Optional user metadata about the index template. - // May have any contents. - // This map is not automatically generated by Elasticsearch. - Meta_ types.Metadata `json:"_meta,omitempty"` - // Priority Priority to determine index template precedence when a new data stream or - // index is created. - // The index template with the highest priority is chosen. - // If no priority is specified the template is treated as though it is of - // priority 0 (lowest priority). - // This number is not automatically generated by Elasticsearch. 
- Priority *int `json:"priority,omitempty"` - // Template Template to be applied. - // It may optionally include an `aliases`, `mappings`, or `settings` - // configuration. - Template *types.IndexTemplateMapping `json:"template,omitempty"` - // Version Version number used to manage index templates externally. - // This number is not automatically generated by Elasticsearch. - Version *int64 `json:"version,omitempty"` -} - -// NewRequest returns a Request -func NewRequest() *Request { - r := &Request{} - return r -} - -// FromJSON allows to load an arbitrary json into the request structure -func (r *Request) FromJSON(data string) (*Request, error) { - var req Request - err := json.Unmarshal([]byte(data), &req) - - if err != nil { - return nil, fmt.Errorf("could not deserialise json into Simulateindextemplate request: %w", err) - } - - return &req, nil -} - -func (s *Request) UnmarshalJSON(data []byte) error { - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "allow_auto_create": - var tmp interface{} - dec.Decode(&tmp) - switch v := tmp.(type) { - case string: - value, err := strconv.ParseBool(v) - if err != nil { - return fmt.Errorf("%s | %w", "AllowAutoCreate", err) - } - s.AllowAutoCreate = &value - case bool: - s.AllowAutoCreate = &v - } - - case "composed_of": - if err := dec.Decode(&s.ComposedOf); err != nil { - return fmt.Errorf("%s | %w", "ComposedOf", err) - } - - case "data_stream": - if err := dec.Decode(&s.DataStream); err != nil { - return fmt.Errorf("%s | %w", "DataStream", err) - } - - case "index_patterns": - rawMsg := json.RawMessage{} - dec.Decode(&rawMsg) - if !bytes.HasPrefix(rawMsg, []byte("[")) { - o := new(string) - if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return fmt.Errorf("%s | %w", "IndexPatterns", err) - } - - s.IndexPatterns = append(s.IndexPatterns, *o) - } else { - if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.IndexPatterns); err != nil { - return fmt.Errorf("%s | %w", "IndexPatterns", err) - } - } - - case "_meta": - if err := dec.Decode(&s.Meta_); err != nil { - return fmt.Errorf("%s | %w", "Meta_", err) - } - - case "priority": - - var tmp interface{} - dec.Decode(&tmp) - switch v := tmp.(type) { - case string: - value, err := strconv.Atoi(v) - if err != nil { - return fmt.Errorf("%s | %w", "Priority", err) - } - s.Priority = &value - case float64: - f := int(v) - s.Priority = &f - } - - case "template": - if err := dec.Decode(&s.Template); err != nil { - return fmt.Errorf("%s | %w", "Template", err) - } - - case "version": - if err := dec.Decode(&s.Version); err != nil { - return fmt.Errorf("%s | %w", "Version", err) - } - - } - } - return nil -} diff --git a/typedapi/indices/simulateindextemplate/response.go b/typedapi/indices/simulateindextemplate/response.go index c7cdba44a5..b040cce583 100644 --- a/typedapi/indices/simulateindextemplate/response.go +++ b/typedapi/indices/simulateindextemplate/response.go @@ -16,14 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package simulateindextemplate +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + // Response holds the response body struct for the package simulateindextemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/simulate_index_template/IndicesSimulateIndexTemplateResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/simulate_index_template/IndicesSimulateIndexTemplateResponse.ts#L25-L30 type Response struct { + Overlapping []types.Overlapping `json:"overlapping,omitempty"` + Template types.Template `json:"template"` } // NewResponse returns a Response diff --git a/typedapi/indices/simulateindextemplate/simulate_index_template.go b/typedapi/indices/simulateindextemplate/simulate_index_template.go index c2031c1518..18ce694710 100644 --- a/typedapi/indices/simulateindextemplate/simulate_index_template.go +++ b/typedapi/indices/simulateindextemplate/simulate_index_template.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Simulate matching the given index name against the index templates in the -// system +// Simulate an index. +// Returns the index configuration that would be applied to the specified index +// from an existing index template. package simulateindextemplate import ( - gobytes "bytes" "context" "encoding/json" "errors" @@ -54,10 +54,6 @@ type SimulateIndexTemplate struct { raw io.Reader - req *Request - deferred []func(request *Request) error - buf *gobytes.Buffer - paramSet int name string @@ -82,8 +78,9 @@ func NewSimulateIndexTemplateFunc(tp elastictransport.Interface) NewSimulateInde } } -// Simulate matching the given index name against the index templates in the -// system +// Simulate an index. +// Returns the index configuration that would be applied to the specified index +// from an existing index template. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-simulate-index.html func New(tp elastictransport.Interface) *SimulateIndexTemplate { @@ -91,10 +88,6 @@ func New(tp elastictransport.Interface) *SimulateIndexTemplate { transport: tp, values: make(url.Values), headers: make(http.Header), - - buf: gobytes.NewBuffer(nil), - - req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -106,21 +99,6 @@ func New(tp elastictransport.Interface) *SimulateIndexTemplate { return r } -// Raw takes a json payload as input which is then passed to the http.Request -// If specified Raw takes precedence on Request method. -func (r *SimulateIndexTemplate) Raw(raw io.Reader) *SimulateIndexTemplate { - r.raw = raw - - return r -} - -// Request allows to set the request property with the appropriate payload. -func (r *SimulateIndexTemplate) Request(req *Request) *SimulateIndexTemplate { - r.req = req - - return r -} - // HttpRequest returns the http.Request object built from the // given parameters. 
func (r *SimulateIndexTemplate) HttpRequest(ctx context.Context) (*http.Request, error) { @@ -130,31 +108,6 @@ func (r *SimulateIndexTemplate) HttpRequest(ctx context.Context) (*http.Request, var err error - if len(r.deferred) > 0 { - for _, f := range r.deferred { - deferredErr := f(r.req) - if deferredErr != nil { - return nil, deferredErr - } - } - } - - if r.raw == nil && r.req != nil { - - data, err := json.Marshal(r.req) - - if err != nil { - return nil, fmt.Errorf("could not serialise request for SimulateIndexTemplate: %w", err) - } - - r.buf.Write(data) - - } - - if r.buf.Len() > 0 { - r.raw = r.buf - } - r.path.Scheme = "http" switch { @@ -301,6 +254,45 @@ func (r SimulateIndexTemplate) Do(providedCtx context.Context) (*Response, error return nil, errorResponse } +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r SimulateIndexTemplate) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "indices.simulate_index_template") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the SimulateIndexTemplate query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + // Header set a key, value pair in the SimulateIndexTemplate headers map. func (r *SimulateIndexTemplate) Header(key, value string) *SimulateIndexTemplate { r.headers.Set(key, value) @@ -308,7 +300,7 @@ func (r *SimulateIndexTemplate) Header(key, value string) *SimulateIndexTemplate return r } -// Name Index or template name to simulate +// Name Name of the index to simulate // API Name: name func (r *SimulateIndexTemplate) _name(name string) *SimulateIndexTemplate { r.paramSet |= nameMask @@ -317,21 +309,8 @@ func (r *SimulateIndexTemplate) _name(name string) *SimulateIndexTemplate { return r } -// Create If `true`, the template passed in the body is only used if no existing -// templates match the same index patterns. If `false`, the simulation uses -// the template with the highest priority. Note that the template is not -// permanently added or updated in either case; it is only used for the -// simulation. -// API name: create -func (r *SimulateIndexTemplate) Create(create bool) *SimulateIndexTemplate { - r.values.Set("create", strconv.FormatBool(create)) - - return r -} - // MasterTimeout Period to wait for a connection to the master node. If no response is -// received -// before the timeout expires, the request fails and returns an error. +// received before the timeout expires, the request fails and returns an error. 
// API name: master_timeout func (r *SimulateIndexTemplate) MasterTimeout(duration string) *SimulateIndexTemplate { r.values.Set("master_timeout", duration) @@ -347,90 +326,46 @@ func (r *SimulateIndexTemplate) IncludeDefaults(includedefaults bool) *SimulateI return r } -// AllowAutoCreate This setting overrides the value of the `action.auto_create_index` cluster -// setting. -// If set to `true` in a template, then indices can be automatically created -// using that template even if auto-creation of indices is disabled via -// `actions.auto_create_index`. -// If set to `false`, then indices or data streams matching the template must -// always be explicitly created, and may never be automatically created. -// API name: allow_auto_create -func (r *SimulateIndexTemplate) AllowAutoCreate(allowautocreate bool) *SimulateIndexTemplate { - r.req.AllowAutoCreate = &allowautocreate +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *SimulateIndexTemplate) ErrorTrace(errortrace bool) *SimulateIndexTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) return r } -// ComposedOf An ordered list of component template names. -// Component templates are merged in the order specified, meaning that the last -// component template specified has the highest precedence. -// API name: composed_of -func (r *SimulateIndexTemplate) ComposedOf(composedofs ...string) *SimulateIndexTemplate { - r.req.ComposedOf = composedofs - - return r -} - -// DataStream If this object is included, the template is used to create data streams and -// their backing indices. -// Supports an empty object. -// Data streams require a matching index template with a `data_stream` object. -// API name: data_stream -func (r *SimulateIndexTemplate) DataStream(datastream *types.DataStreamVisibility) *SimulateIndexTemplate { - - r.req.DataStream = datastream - - return r -} - -// IndexPatterns Array of wildcard (`*`) expressions used to match the names of data streams -// and indices during creation. -// API name: index_patterns -func (r *SimulateIndexTemplate) IndexPatterns(indices ...string) *SimulateIndexTemplate { - r.req.IndexPatterns = indices - - return r -} - -// Meta_ Optional user metadata about the index template. -// May have any contents. -// This map is not automatically generated by Elasticsearch. -// API name: _meta -func (r *SimulateIndexTemplate) Meta_(metadata types.Metadata) *SimulateIndexTemplate { - r.req.Meta_ = metadata - - return r -} - -// Priority Priority to determine index template precedence when a new data stream or -// index is created. -// The index template with the highest priority is chosen. -// If no priority is specified the template is treated as though it is of -// priority 0 (lowest priority). -// This number is not automatically generated by Elasticsearch. -// API name: priority -func (r *SimulateIndexTemplate) Priority(priority int) *SimulateIndexTemplate { - r.req.Priority = &priority +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SimulateIndexTemplate) FilterPath(filterpaths ...string) *SimulateIndexTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) return r } -// Template Template to be applied. 
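// Illustrative usage (not part of the generated diff): as the hunks above show,
// SimulateIndexTemplate no longer carries a request body; only the name of the
// index to simulate is passed, and the response describes the template
// configuration that would be applied to it. Sketch reusing the `es` client;
// the index name is made up.
func simulateIndexTemplateExample(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Indices.SimulateIndexTemplate("logs-2024.07.01").Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("simulation returned %d overlapping templates\n", len(res.Overlapping))
	return nil
}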
-// It may optionally include an `aliases`, `mappings`, or `settings` -// configuration. -// API name: template -func (r *SimulateIndexTemplate) Template(template *types.IndexTemplateMapping) *SimulateIndexTemplate { - - r.req.Template = template +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SimulateIndexTemplate) Human(human bool) *SimulateIndexTemplate { + r.values.Set("human", strconv.FormatBool(human)) return r } -// Version Version number used to manage index templates externally. -// This number is not automatically generated by Elasticsearch. -// API name: version -func (r *SimulateIndexTemplate) Version(versionnumber int64) *SimulateIndexTemplate { - r.req.Version = &versionnumber +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *SimulateIndexTemplate) Pretty(pretty bool) *SimulateIndexTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) return r } diff --git a/typedapi/indices/simulatetemplate/request.go b/typedapi/indices/simulatetemplate/request.go index e7294ac5f8..d50ebdd884 100644 --- a/typedapi/indices/simulatetemplate/request.go +++ b/typedapi/indices/simulatetemplate/request.go @@ -16,15 +16,198 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package simulatetemplate import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) // Request holds the request body struct for the package simulatetemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/simulate_template/IndicesSimulateTemplateRequest.ts#L25-L61 -type Request = types.IndexTemplate +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/simulate_template/IndicesSimulateTemplateRequest.ts#L27-L120 +type Request struct { + + // AllowAutoCreate This setting overrides the value of the `action.auto_create_index` cluster + // setting. + // If set to `true` in a template, then indices can be automatically created + // using that template even if auto-creation of indices is disabled via + // `actions.auto_create_index`. + // If set to `false`, then indices or data streams matching the template must + // always be explicitly created, and may never be automatically created. + AllowAutoCreate *bool `json:"allow_auto_create,omitempty"` + // ComposedOf An ordered list of component template names. + // Component templates are merged in the order specified, meaning that the last + // component template specified has the highest precedence. + ComposedOf []string `json:"composed_of,omitempty"` + // DataStream If this object is included, the template is used to create data streams and + // their backing indices. + // Supports an empty object. + // Data streams require a matching index template with a `data_stream` object. 
+ DataStream *types.DataStreamVisibility `json:"data_stream,omitempty"` + // Deprecated Marks this index template as deprecated. When creating or updating a + // non-deprecated index template + // that uses deprecated components, Elasticsearch will emit a deprecation + // warning. + Deprecated *bool `json:"deprecated,omitempty"` + // IgnoreMissingComponentTemplates The configuration option ignore_missing_component_templates can be used when + // an index template + // references a component template that might not exist + IgnoreMissingComponentTemplates []string `json:"ignore_missing_component_templates,omitempty"` + // IndexPatterns Array of wildcard (`*`) expressions used to match the names of data streams + // and indices during creation. + IndexPatterns []string `json:"index_patterns,omitempty"` + // Meta_ Optional user metadata about the index template. + // May have any contents. + // This map is not automatically generated by Elasticsearch. + Meta_ types.Metadata `json:"_meta,omitempty"` + // Priority Priority to determine index template precedence when a new data stream or + // index is created. + // The index template with the highest priority is chosen. + // If no priority is specified the template is treated as though it is of + // priority 0 (lowest priority). + // This number is not automatically generated by Elasticsearch. + Priority *int64 `json:"priority,omitempty"` + // Template Template to be applied. + // It may optionally include an `aliases`, `mappings`, or `settings` + // configuration. + Template *types.IndexTemplateMapping `json:"template,omitempty"` + // Version Version number used to manage index templates externally. + // This number is not automatically generated by Elasticsearch. + Version *int64 `json:"version,omitempty"` +} + +// NewRequest returns a Request +func NewRequest() *Request { + r := &Request{} + + return r +} + +// FromJSON allows to load an arbitrary json into the request structure +func (r *Request) FromJSON(data string) (*Request, error) { + var req Request + err := json.Unmarshal([]byte(data), &req) + + if err != nil { + return nil, fmt.Errorf("could not deserialise json into Simulatetemplate request: %w", err) + } + + return &req, nil +} + +func (s *Request) UnmarshalJSON(data []byte) error { + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "allow_auto_create": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "AllowAutoCreate", err) + } + s.AllowAutoCreate = &value + case bool: + s.AllowAutoCreate = &v + } + + case "composed_of": + if err := dec.Decode(&s.ComposedOf); err != nil { + return fmt.Errorf("%s | %w", "ComposedOf", err) + } + + case "data_stream": + if err := dec.Decode(&s.DataStream); err != nil { + return fmt.Errorf("%s | %w", "DataStream", err) + } + + case "deprecated": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Deprecated", err) + } + s.Deprecated = &value + case bool: + s.Deprecated = &v + } + + case "ignore_missing_component_templates": + if err := dec.Decode(&s.IgnoreMissingComponentTemplates); err != nil { + return fmt.Errorf("%s | %w", "IgnoreMissingComponentTemplates", err) + } + + case "index_patterns": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if 
!bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "IndexPatterns", err) + } + + s.IndexPatterns = append(s.IndexPatterns, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.IndexPatterns); err != nil { + return fmt.Errorf("%s | %w", "IndexPatterns", err) + } + } + + case "_meta": + if err := dec.Decode(&s.Meta_); err != nil { + return fmt.Errorf("%s | %w", "Meta_", err) + } + + case "priority": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return fmt.Errorf("%s | %w", "Priority", err) + } + s.Priority = &value + case float64: + f := int64(v) + s.Priority = &f + } + + case "template": + if err := dec.Decode(&s.Template); err != nil { + return fmt.Errorf("%s | %w", "Template", err) + } + + case "version": + if err := dec.Decode(&s.Version); err != nil { + return fmt.Errorf("%s | %w", "Version", err) + } + + } + } + return nil +} diff --git a/typedapi/indices/simulatetemplate/response.go b/typedapi/indices/simulatetemplate/response.go index 2213be4ca8..a4b3af68fc 100644 --- a/typedapi/indices/simulatetemplate/response.go +++ b/typedapi/indices/simulatetemplate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package simulatetemplate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package simulatetemplate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L26-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L26-L31 type Response struct { Overlapping []types.Overlapping `json:"overlapping,omitempty"` Template types.Template `json:"template"` diff --git a/typedapi/indices/simulatetemplate/simulate_template.go b/typedapi/indices/simulatetemplate/simulate_template.go index 982aa8d099..f50c47d575 100644 --- a/typedapi/indices/simulatetemplate/simulate_template.go +++ b/typedapi/indices/simulatetemplate/simulate_template.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Simulate resolving the given template name or body +// Simulate an index template. +// Returns the index configuration that would be applied by a particular index +// template. package simulatetemplate import ( @@ -79,7 +81,9 @@ func NewSimulateTemplateFunc(tp elastictransport.Interface) NewSimulateTemplate } } -// Simulate resolving the given template name or body +// Simulate an index template. +// Returns the index configuration that would be applied by a particular index +// template. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-simulate-template.html func New(tp elastictransport.Interface) *SimulateTemplate { @@ -89,6 +93,8 @@ func New(tp elastictransport.Interface) *SimulateTemplate { headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -348,6 +354,57 @@ func (r *SimulateTemplate) IncludeDefaults(includedefaults bool) *SimulateTempla return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *SimulateTemplate) ErrorTrace(errortrace bool) *SimulateTemplate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SimulateTemplate) FilterPath(filterpaths ...string) *SimulateTemplate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SimulateTemplate) Human(human bool) *SimulateTemplate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *SimulateTemplate) Pretty(pretty bool) *SimulateTemplate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + +// AllowAutoCreate This setting overrides the value of the `action.auto_create_index` cluster +// setting. +// If set to `true` in a template, then indices can be automatically created +// using that template even if auto-creation of indices is disabled via +// `actions.auto_create_index`. +// If set to `false`, then indices or data streams matching the template must +// always be explicitly created, and may never be automatically created. // API name: allow_auto_create func (r *SimulateTemplate) AllowAutoCreate(allowautocreate bool) *SimulateTemplate { r.req.AllowAutoCreate = &allowautocreate @@ -370,22 +427,45 @@ func (r *SimulateTemplate) ComposedOf(composedofs ...string) *SimulateTemplate { // Supports an empty object. // Data streams require a matching index template with a `data_stream` object. // API name: data_stream -func (r *SimulateTemplate) DataStream(datastream *types.IndexTemplateDataStreamConfiguration) *SimulateTemplate { +func (r *SimulateTemplate) DataStream(datastream *types.DataStreamVisibility) *SimulateTemplate { r.req.DataStream = datastream return r } -// IndexPatterns Name of the index template. +// Deprecated Marks this index template as deprecated. When creating or updating a +// non-deprecated index template +// that uses deprecated components, Elasticsearch will emit a deprecation +// warning. 
+// API name: deprecated +func (r *SimulateTemplate) Deprecated(deprecated bool) *SimulateTemplate { + r.req.Deprecated = &deprecated + + return r +} + +// IgnoreMissingComponentTemplates The configuration option ignore_missing_component_templates can be used when +// an index template +// references a component template that might not exist +// API name: ignore_missing_component_templates +func (r *SimulateTemplate) IgnoreMissingComponentTemplates(ignoremissingcomponenttemplates ...string) *SimulateTemplate { + r.req.IgnoreMissingComponentTemplates = ignoremissingcomponenttemplates + + return r +} + +// IndexPatterns Array of wildcard (`*`) expressions used to match the names of data streams +// and indices during creation. // API name: index_patterns -func (r *SimulateTemplate) IndexPatterns(names ...string) *SimulateTemplate { - r.req.IndexPatterns = names +func (r *SimulateTemplate) IndexPatterns(indices ...string) *SimulateTemplate { + r.req.IndexPatterns = indices return r } -// Meta_ Optional user metadata about the index template. May have any contents. +// Meta_ Optional user metadata about the index template. +// May have any contents. // This map is not automatically generated by Elasticsearch. // API name: _meta func (r *SimulateTemplate) Meta_(metadata types.Metadata) *SimulateTemplate { @@ -412,7 +492,7 @@ func (r *SimulateTemplate) Priority(priority int64) *SimulateTemplate { // It may optionally include an `aliases`, `mappings`, or `settings` // configuration. // API name: template -func (r *SimulateTemplate) Template(template *types.IndexTemplateSummary) *SimulateTemplate { +func (r *SimulateTemplate) Template(template *types.IndexTemplateMapping) *SimulateTemplate { r.req.Template = template diff --git a/typedapi/indices/split/request.go b/typedapi/indices/split/request.go index 1e2853588d..af11bfe45d 100644 --- a/typedapi/indices/split/request.go +++ b/typedapi/indices/split/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package split @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package split // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/split/IndicesSplitRequest.ts#L27-L74 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/split/IndicesSplitRequest.ts#L27-L74 type Request struct { // Aliases Aliases for the resulting index. @@ -44,6 +44,7 @@ func NewRequest() *Request { Aliases: make(map[string]types.Alias, 0), Settings: make(map[string]json.RawMessage, 0), } + return r } diff --git a/typedapi/indices/split/response.go b/typedapi/indices/split/response.go index 27803da229..9aaabd4fd0 100644 --- a/typedapi/indices/split/response.go +++ b/typedapi/indices/split/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package split // Response holds the response body struct for the package split // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/split/IndicesSplitResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/split/IndicesSplitResponse.ts#L22-L28 type Response struct { Acknowledged bool `json:"acknowledged"` Index string `json:"index"` diff --git a/typedapi/indices/split/split.go b/typedapi/indices/split/split.go index fe16c8faa5..50af328add 100644 --- a/typedapi/indices/split/split.go +++ b/typedapi/indices/split/split.go @@ -16,10 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Allows you to split an existing index into a new index with more primary -// shards. +// Splits an existing index into a new index with more primary shards. package split import ( @@ -31,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -86,8 +86,7 @@ func NewSplitFunc(tp elastictransport.Interface) NewSplit { } } -// Allows you to split an existing index into a new index with more primary -// shards. +// Splits an existing index into a new index with more primary shards. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-split-index.html func New(tp elastictransport.Interface) *Split { @@ -365,6 +364,50 @@ func (r *Split) WaitForActiveShards(waitforactiveshards string) *Split { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Split) ErrorTrace(errortrace bool) *Split { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Split) FilterPath(filterpaths ...string) *Split { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Split) Human(human bool) *Split { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Split) Pretty(pretty bool) *Split { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Aliases Aliases for the resulting index. 
// API name: aliases func (r *Split) Aliases(aliases map[string]types.Alias) *Split { diff --git a/typedapi/indices/stats/response.go b/typedapi/indices/stats/response.go index 668ff3dc44..37970edc88 100644 --- a/typedapi/indices/stats/response.go +++ b/typedapi/indices/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/IndicesStatsResponse.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/IndicesStatsResponse.ts#L24-L30 type Response struct { All_ types.IndicesStats `json:"_all"` Indices map[string]types.IndicesStats `json:"indices,omitempty"` diff --git a/typedapi/indices/stats/stats.go b/typedapi/indices/stats/stats.go index dee9b495ca..7a4363838d 100644 --- a/typedapi/indices/stats/stats.go +++ b/typedapi/indices/stats/stats.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Provides statistics on operations happening in an index. +// Returns statistics for one or more indices. +// For data streams, the API retrieves statistics for the stream’s backing +// indices. package stats import ( @@ -27,7 +29,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -80,7 +81,9 @@ func NewStatsFunc(tp elastictransport.Interface) NewStats { } } -// Provides statistics on operations happening in an index. +// Returns statistics for one or more indices. +// For data streams, the API retrieves statistics for the stream’s backing +// indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-stats.html func New(tp elastictransport.Interface) *Stats { @@ -297,7 +300,7 @@ func (r Stats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -433,3 +436,47 @@ func (r *Stats) Level(level level.Level) *Stats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Stats) ErrorTrace(errortrace bool) *Stats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Stats) FilterPath(filterpaths ...string) *Stats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Stats) Human(human bool) *Stats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Stats) Pretty(pretty bool) *Stats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/unfreeze/response.go b/typedapi/indices/unfreeze/response.go index f5dc467e1f..d789db2752 100644 --- a/typedapi/indices/unfreeze/response.go +++ b/typedapi/indices/unfreeze/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package unfreeze // Response holds the response body struct for the package unfreeze // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/unfreeze/IndicesUnfreezeResponse.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/unfreeze/IndicesUnfreezeResponse.ts#L20-L25 type Response struct { Acknowledged bool `json:"acknowledged"` ShardsAcknowledged bool `json:"shards_acknowledged"` diff --git a/typedapi/indices/unfreeze/unfreeze.go b/typedapi/indices/unfreeze/unfreeze.go index d7011a2d6b..8d8e70dd9c 100644 --- a/typedapi/indices/unfreeze/unfreeze.go +++ b/typedapi/indices/unfreeze/unfreeze.go @@ -16,10 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Unfreezes an index. When a frozen index is unfrozen, the index goes through -// the normal recovery process and becomes writeable again. +// Unfreezes an index. package unfreeze import ( @@ -28,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -79,8 +77,7 @@ func NewUnfreezeFunc(tp elastictransport.Interface) NewUnfreeze { } } -// Unfreezes an index. When a frozen index is unfrozen, the index goes through -// the normal recovery process and becomes writeable again. +// Unfreezes an index. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/unfreeze-index-api.html func New(tp elastictransport.Interface) *Unfreeze { @@ -264,7 +261,7 @@ func (r Unfreeze) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -366,3 +363,47 @@ func (r *Unfreeze) WaitForActiveShards(waitforactiveshards string) *Unfreeze { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *Unfreeze) ErrorTrace(errortrace bool) *Unfreeze { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Unfreeze) FilterPath(filterpaths ...string) *Unfreeze { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Unfreeze) Human(human bool) *Unfreeze { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Unfreeze) Pretty(pretty bool) *Unfreeze { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/indices/updatealiases/request.go b/typedapi/indices/updatealiases/request.go index 8be89c6549..9a1d509d2a 100644 --- a/typedapi/indices/updatealiases/request.go +++ b/typedapi/indices/updatealiases/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatealiases @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package updatealiases // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/update_aliases/IndicesUpdateAliasesRequest.ts#L24-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/update_aliases/IndicesUpdateAliasesRequest.ts#L24-L51 type Request struct { // Actions Actions to perform. @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/indices/updatealiases/response.go b/typedapi/indices/updatealiases/response.go index f31c05fa13..e73174744d 100644 --- a/typedapi/indices/updatealiases/response.go +++ b/typedapi/indices/updatealiases/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatealiases // Response holds the response body struct for the package updatealiases // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/update_aliases/IndicesUpdateAliasesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/update_aliases/IndicesUpdateAliasesResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/indices/updatealiases/update_aliases.go b/typedapi/indices/updatealiases/update_aliases.go index 2c4fd322ce..72e5fa3c48 100644 --- a/typedapi/indices/updatealiases/update_aliases.go +++ b/typedapi/indices/updatealiases/update_aliases.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Updates index aliases. +// Adds a data stream or index to an alias. package updatealiases import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -72,7 +73,7 @@ func NewUpdateAliasesFunc(tp elastictransport.Interface) NewUpdateAliases { } } -// Updates index aliases. +// Adds a data stream or index to an alias. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html func New(tp elastictransport.Interface) *UpdateAliases { @@ -309,6 +310,50 @@ func (r *UpdateAliases) Timeout(duration string) *UpdateAliases { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpdateAliases) ErrorTrace(errortrace bool) *UpdateAliases { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *UpdateAliases) FilterPath(filterpaths ...string) *UpdateAliases { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *UpdateAliases) Human(human bool) *UpdateAliases { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpdateAliases) Pretty(pretty bool) *UpdateAliases { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Actions Actions to perform. 
// API name: actions func (r *UpdateAliases) Actions(actions ...types.IndicesAction) *UpdateAliases { diff --git a/typedapi/indices/validatequery/request.go b/typedapi/indices/validatequery/request.go index ee955c2289..ac2f8f2b04 100644 --- a/typedapi/indices/validatequery/request.go +++ b/typedapi/indices/validatequery/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package validatequery @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package validatequery // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/validate_query/IndicesValidateQueryRequest.ts#L25-L111 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/validate_query/IndicesValidateQueryRequest.ts#L25-L111 type Request struct { // Query Query in the Lucene query string syntax. @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/indices/validatequery/response.go b/typedapi/indices/validatequery/response.go index 05727994d6..3a7de6d8f2 100644 --- a/typedapi/indices/validatequery/response.go +++ b/typedapi/indices/validatequery/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package validatequery @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package validatequery // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/validate_query/IndicesValidateQueryResponse.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/validate_query/IndicesValidateQueryResponse.ts#L23-L30 type Response struct { Error *string `json:"error,omitempty"` Explanations []types.IndicesValidationExplanation `json:"explanations,omitempty"` diff --git a/typedapi/indices/validatequery/validate_query.go b/typedapi/indices/validatequery/validate_query.go index e37661de59..7564dc8580 100644 --- a/typedapi/indices/validatequery/validate_query.go +++ b/typedapi/indices/validatequery/validate_query.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Allows a user to validate a potentially expensive query without executing it. +// Validates a potentially expensive query without executing it. package validatequery import ( @@ -81,7 +81,7 @@ func NewValidateQueryFunc(tp elastictransport.Interface) NewValidateQuery { } } -// Allows a user to validate a potentially expensive query without executing it. +// Validates a potentially expensive query without executing it. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-validate.html func New(tp elastictransport.Interface) *ValidateQuery { @@ -440,6 +440,50 @@ func (r *ValidateQuery) Q(q string) *ValidateQuery { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ValidateQuery) ErrorTrace(errortrace bool) *ValidateQuery { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ValidateQuery) FilterPath(filterpaths ...string) *ValidateQuery { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ValidateQuery) Human(human bool) *ValidateQuery { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ValidateQuery) Pretty(pretty bool) *ValidateQuery { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Query Query in the Lucene query string syntax. // API name: query func (r *ValidateQuery) Query(query *types.Query) *ValidateQuery { diff --git a/typedapi/inference/deletemodel/delete_model.go b/typedapi/inference/delete/delete.go similarity index 67% rename from typedapi/inference/deletemodel/delete_model.go rename to typedapi/inference/delete/delete.go index 01c41ce824..c7f4b4ff08 100644 --- a/typedapi/inference/deletemodel/delete_model.go +++ b/typedapi/inference/delete/delete.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Delete model in the Inference API -package deletemodel +// Delete an inference endpoint +package delete import ( "context" @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -45,7 +45,7 @@ const ( // ErrBuildPath is returned in case of missing parameters within the build of the request. var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") -type DeleteModel struct { +type Delete struct { transport elastictransport.Interface headers http.Header @@ -64,13 +64,13 @@ type DeleteModel struct { instrument elastictransport.Instrumentation } -// NewDeleteModel type alias for index. -type NewDeleteModel func(inferenceid string) *DeleteModel +// NewDelete type alias for index. +type NewDelete func(inferenceid string) *Delete -// NewDeleteModelFunc returns a new instance of DeleteModel with the provided transport. +// NewDeleteFunc returns a new instance of Delete with the provided transport. 
// Used in the index of the library this allows to retrieve every apis in once place. -func NewDeleteModelFunc(tp elastictransport.Interface) NewDeleteModel { - return func(inferenceid string) *DeleteModel { +func NewDeleteFunc(tp elastictransport.Interface) NewDelete { + return func(inferenceid string) *Delete { n := New(tp) n._inferenceid(inferenceid) @@ -79,11 +79,11 @@ func NewDeleteModelFunc(tp elastictransport.Interface) NewDeleteModel { } } -// Delete model in the Inference API +// Delete an inference endpoint // // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-inference-api.html -func New(tp elastictransport.Interface) *DeleteModel { - r := &DeleteModel{ +func New(tp elastictransport.Interface) *Delete { + r := &Delete{ transport: tp, values: make(url.Values), headers: make(http.Header), @@ -100,7 +100,7 @@ func New(tp elastictransport.Interface) *DeleteModel { // HttpRequest returns the http.Request object built from the // given parameters. -func (r *DeleteModel) HttpRequest(ctx context.Context) (*http.Request, error) { +func (r *Delete) HttpRequest(ctx context.Context) (*http.Request, error) { var path strings.Builder var method string var req *http.Request @@ -167,11 +167,11 @@ func (r *DeleteModel) HttpRequest(ctx context.Context) (*http.Request, error) { } // Perform runs the http.Request through the provided transport and returns an http.Response. -func (r DeleteModel) Perform(providedCtx context.Context) (*http.Response, error) { +func (r Delete) Perform(providedCtx context.Context) (*http.Response, error) { var ctx context.Context if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { if r.spanStarted == false { - ctx := instrument.Start(providedCtx, "inference.delete_model") + ctx := instrument.Start(providedCtx, "inference.delete") defer instrument.Close(ctx) } } @@ -188,17 +188,17 @@ func (r DeleteModel) Perform(providedCtx context.Context) (*http.Response, error } if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.BeforeRequest(req, "inference.delete_model") - if reader := instrument.RecordRequestBody(ctx, "inference.delete_model", r.raw); reader != nil { + instrument.BeforeRequest(req, "inference.delete") + if reader := instrument.RecordRequestBody(ctx, "inference.delete", r.raw); reader != nil { req.Body = reader } } res, err := r.transport.Perform(req) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "inference.delete_model") + instrument.AfterRequest(req, "elasticsearch", "inference.delete") } if err != nil { - localErr := fmt.Errorf("an error happened during the DeleteModel query execution: %w", err) + localErr := fmt.Errorf("an error happened during the Delete query execution: %w", err) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, localErr) } @@ -208,12 +208,12 @@ func (r DeleteModel) Perform(providedCtx context.Context) (*http.Response, error return res, nil } -// Do runs the request through the transport, handle the response and returns a deletemodel.Response -func (r DeleteModel) Do(providedCtx context.Context) (*Response, error) { +// Do runs the request through the transport, handle the response and returns a delete.Response +func (r Delete) Do(providedCtx context.Context) (*Response, error) { var ctx context.Context r.spanStarted = true if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - ctx = instrument.Start(providedCtx, 
"inference.delete_model") + ctx = instrument.Start(providedCtx, "inference.delete") defer instrument.Close(ctx) } if ctx == nil { @@ -264,11 +264,11 @@ func (r DeleteModel) Do(providedCtx context.Context) (*Response, error) { // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. -func (r DeleteModel) IsSuccess(providedCtx context.Context) (bool, error) { +func (r Delete) IsSuccess(providedCtx context.Context) (bool, error) { var ctx context.Context r.spanStarted = true if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - ctx = instrument.Start(providedCtx, "inference.delete_model") + ctx = instrument.Start(providedCtx, "inference.delete") defer instrument.Close(ctx) } if ctx == nil { @@ -280,7 +280,7 @@ func (r DeleteModel) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -291,7 +291,7 @@ func (r DeleteModel) IsSuccess(providedCtx context.Context) (bool, error) { } if res.StatusCode != 404 { - err := fmt.Errorf("an error happened during the DeleteModel query execution, status code: %d", res.StatusCode) + err := fmt.Errorf("an error happened during the Delete query execution, status code: %d", res.StatusCode) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, err) } @@ -301,8 +301,8 @@ func (r DeleteModel) IsSuccess(providedCtx context.Context) (bool, error) { return false, nil } -// Header set a key, value pair in the DeleteModel headers map. -func (r *DeleteModel) Header(key, value string) *DeleteModel { +// Header set a key, value pair in the Delete headers map. +func (r *Delete) Header(key, value string) *Delete { r.headers.Set(key, value) return r @@ -310,7 +310,7 @@ func (r *DeleteModel) Header(key, value string) *DeleteModel { // TaskType The task type // API Name: tasktype -func (r *DeleteModel) TaskType(tasktype string) *DeleteModel { +func (r *Delete) TaskType(tasktype string) *Delete { r.paramSet |= tasktypeMask r.tasktype = tasktype @@ -319,9 +319,71 @@ func (r *DeleteModel) TaskType(tasktype string) *DeleteModel { // InferenceId The inference Id // API Name: inferenceid -func (r *DeleteModel) _inferenceid(inferenceid string) *DeleteModel { +func (r *Delete) _inferenceid(inferenceid string) *Delete { r.paramSet |= inferenceidMask r.inferenceid = inferenceid return r } + +// DryRun When true, the endpoint is not deleted, and a list of ingest processors which +// reference this endpoint is returned +// API name: dry_run +func (r *Delete) DryRun(dryrun bool) *Delete { + r.values.Set("dry_run", strconv.FormatBool(dryrun)) + + return r +} + +// Force When true, the inference endpoint is forcefully deleted even if it is still +// being used by ingest processors or semantic text fields +// API name: force +func (r *Delete) Force(force bool) *Delete { + r.values.Set("force", strconv.FormatBool(force)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Delete) ErrorTrace(errortrace bool) *Delete { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Delete) FilterPath(filterpaths ...string) *Delete { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Delete) Human(human bool) *Delete { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Delete) Pretty(pretty bool) *Delete { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/inference/delete/response.go b/typedapi/inference/delete/response.go new file mode 100644 index 0000000000..9b02f9ca49 --- /dev/null +++ b/typedapi/inference/delete/response.go @@ -0,0 +1,38 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package delete + +// Response holds the response body struct for the package delete +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/delete/DeleteResponse.ts#L22-L24 +type Response struct { + + // Acknowledged For a successful response, this value is always true. On failure, an + // exception is returned instead. + Acknowledged bool `json:"acknowledged"` + Pipelines []string `json:"pipelines"` +} + +// NewResponse returns a Response +func NewResponse() *Response { + r := &Response{} + return r +} diff --git a/typedapi/inference/getmodel/get_model.go b/typedapi/inference/get/get.go similarity index 69% rename from typedapi/inference/getmodel/get_model.go rename to typedapi/inference/get/get.go index aea178e210..615b550085 100644 --- a/typedapi/inference/getmodel/get_model.go +++ b/typedapi/inference/get/get.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Get a model in the Inference API -package getmodel +// Get an inference endpoint +package get import ( "context" @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -45,7 +45,7 @@ const ( // ErrBuildPath is returned in case of missing parameters within the build of the request. var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") -type GetModel struct { +type Get struct { transport elastictransport.Interface headers http.Header @@ -64,26 +64,24 @@ type GetModel struct { instrument elastictransport.Instrumentation } -// NewGetModel type alias for index. -type NewGetModel func(inferenceid string) *GetModel +// NewGet type alias for index. +type NewGet func() *Get -// NewGetModelFunc returns a new instance of GetModel with the provided transport. +// NewGetFunc returns a new instance of Get with the provided transport. // Used in the index of the library this allows to retrieve every apis in once place. -func NewGetModelFunc(tp elastictransport.Interface) NewGetModel { - return func(inferenceid string) *GetModel { +func NewGetFunc(tp elastictransport.Interface) NewGet { + return func() *Get { n := New(tp) - n._inferenceid(inferenceid) - return n } } -// Get a model in the Inference API +// Get an inference endpoint // // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-inference-api.html -func New(tp elastictransport.Interface) *GetModel { - r := &GetModel{ +func New(tp elastictransport.Interface) *Get { + r := &Get{ transport: tp, values: make(url.Values), headers: make(http.Header), @@ -100,7 +98,7 @@ func New(tp elastictransport.Interface) *GetModel { // HttpRequest returns the http.Request object built from the // given parameters. -func (r *GetModel) HttpRequest(ctx context.Context) (*http.Request, error) { +func (r *Get) HttpRequest(ctx context.Context) (*http.Request, error) { var path strings.Builder var method string var req *http.Request @@ -110,6 +108,11 @@ func (r *GetModel) HttpRequest(ctx context.Context) (*http.Request, error) { r.path.Scheme = "http" switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_inference") + + method = http.MethodGet case r.paramSet == inferenceidMask: path.WriteString("/") path.WriteString("_inference") @@ -167,11 +170,11 @@ func (r *GetModel) HttpRequest(ctx context.Context) (*http.Request, error) { } // Perform runs the http.Request through the provided transport and returns an http.Response. 
-func (r GetModel) Perform(providedCtx context.Context) (*http.Response, error) { +func (r Get) Perform(providedCtx context.Context) (*http.Response, error) { var ctx context.Context if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { if r.spanStarted == false { - ctx := instrument.Start(providedCtx, "inference.get_model") + ctx := instrument.Start(providedCtx, "inference.get") defer instrument.Close(ctx) } } @@ -188,17 +191,17 @@ func (r GetModel) Perform(providedCtx context.Context) (*http.Response, error) { } if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.BeforeRequest(req, "inference.get_model") - if reader := instrument.RecordRequestBody(ctx, "inference.get_model", r.raw); reader != nil { + instrument.BeforeRequest(req, "inference.get") + if reader := instrument.RecordRequestBody(ctx, "inference.get", r.raw); reader != nil { req.Body = reader } } res, err := r.transport.Perform(req) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "inference.get_model") + instrument.AfterRequest(req, "elasticsearch", "inference.get") } if err != nil { - localErr := fmt.Errorf("an error happened during the GetModel query execution: %w", err) + localErr := fmt.Errorf("an error happened during the Get query execution: %w", err) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, localErr) } @@ -208,12 +211,12 @@ func (r GetModel) Perform(providedCtx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a getmodel.Response -func (r GetModel) Do(providedCtx context.Context) (*Response, error) { +// Do runs the request through the transport, handle the response and returns a get.Response +func (r Get) Do(providedCtx context.Context) (*Response, error) { var ctx context.Context r.spanStarted = true if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - ctx = instrument.Start(providedCtx, "inference.get_model") + ctx = instrument.Start(providedCtx, "inference.get") defer instrument.Close(ctx) } if ctx == nil { @@ -264,11 +267,11 @@ func (r GetModel) Do(providedCtx context.Context) (*Response, error) { // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. 
-func (r GetModel) IsSuccess(providedCtx context.Context) (bool, error) { +func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { var ctx context.Context r.spanStarted = true if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - ctx = instrument.Start(providedCtx, "inference.get_model") + ctx = instrument.Start(providedCtx, "inference.get") defer instrument.Close(ctx) } if ctx == nil { @@ -280,7 +283,7 @@ func (r GetModel) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -291,7 +294,7 @@ func (r GetModel) IsSuccess(providedCtx context.Context) (bool, error) { } if res.StatusCode != 404 { - err := fmt.Errorf("an error happened during the GetModel query execution, status code: %d", res.StatusCode) + err := fmt.Errorf("an error happened during the Get query execution, status code: %d", res.StatusCode) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, err) } @@ -301,8 +304,8 @@ func (r GetModel) IsSuccess(providedCtx context.Context) (bool, error) { return false, nil } -// Header set a key, value pair in the GetModel headers map. -func (r *GetModel) Header(key, value string) *GetModel { +// Header set a key, value pair in the Get headers map. +func (r *Get) Header(key, value string) *Get { r.headers.Set(key, value) return r @@ -310,7 +313,7 @@ func (r *GetModel) Header(key, value string) *GetModel { // TaskType The task type // API Name: tasktype -func (r *GetModel) TaskType(tasktype string) *GetModel { +func (r *Get) TaskType(tasktype string) *Get { r.paramSet |= tasktypeMask r.tasktype = tasktype @@ -319,9 +322,53 @@ func (r *GetModel) TaskType(tasktype string) *GetModel { // InferenceId The inference Id // API Name: inferenceid -func (r *GetModel) _inferenceid(inferenceid string) *GetModel { +func (r *Get) InferenceId(inferenceid string) *Get { r.paramSet |= inferenceidMask r.inferenceid = inferenceid return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Get) ErrorTrace(errortrace bool) *Get { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Get) FilterPath(filterpaths ...string) *Get { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Get) Human(human bool) *Get { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *Get) Pretty(pretty bool) *Get { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/inference/getmodel/response.go b/typedapi/inference/get/response.go similarity index 72% rename from typedapi/inference/getmodel/response.go rename to typedapi/inference/get/response.go index 3dfda81b6a..4fc8ac2029 100644 --- a/typedapi/inference/getmodel/response.go +++ b/typedapi/inference/get/response.go @@ -16,19 +16,19 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -package getmodel +package get import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) -// Response holds the response body struct for the package getmodel +// Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/get_model/GetModelResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/get/GetResponse.ts#L22-L26 type Response struct { - Models []types.ModelConfigContainer `json:"models"` + Endpoints []types.InferenceEndpointInfo `json:"endpoints"` } // NewResponse returns a Response diff --git a/typedapi/inference/inference/inference.go b/typedapi/inference/inference/inference.go index f3543c2eca..514661bb3e 100644 --- a/typedapi/inference/inference/inference.go +++ b/typedapi/inference/inference/inference.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Perform inference on a model +// Perform inference on the service package inference import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -83,7 +84,7 @@ func NewInferenceFunc(tp elastictransport.Interface) NewInference { } } -// Perform inference on a model +// Perform inference on the service // // https://www.elastic.co/guide/en/elasticsearch/reference/current/post-inference-api.html func New(tp elastictransport.Interface) *Inference { @@ -341,7 +342,59 @@ func (r *Inference) _inferenceid(inferenceid string) *Inference { return r } -// Input Text input to the model. +// Timeout Specifies the amount of time to wait for the inference request to complete. +// API name: timeout +func (r *Inference) Timeout(duration string) *Inference { + r.values.Set("timeout", duration) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Inference) ErrorTrace(errortrace bool) *Inference { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Inference) FilterPath(filterpaths ...string) *Inference { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Inference) Human(human bool) *Inference { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Inference) Pretty(pretty bool) *Inference { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + +// Input Inference input. // Either a string or an array of strings. // API name: input func (r *Inference) Input(inputs ...string) *Inference { @@ -350,6 +403,16 @@ func (r *Inference) Input(inputs ...string) *Inference { return r } +// Query Query input, required for rerank task. +// Not required for other tasks. +// API name: query +func (r *Inference) Query(query string) *Inference { + + r.req.Query = &query + + return r +} + // TaskSettings Optional task settings // API name: task_settings func (r *Inference) TaskSettings(tasksettings json.RawMessage) *Inference { diff --git a/typedapi/inference/inference/request.go b/typedapi/inference/inference/request.go index bc22515781..433dca4a3a 100644 --- a/typedapi/inference/inference/request.go +++ b/typedapi/inference/inference/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package inference @@ -26,16 +26,20 @@ import ( "errors" "fmt" "io" + "strconv" ) // Request holds the request body struct for the package inference // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/inference/InferenceRequest.ts#L25-L53 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/inference/InferenceRequest.ts#L26-L66 type Request struct { - // Input Text input to the model. + // Input Inference input. // Either a string or an array of strings. Input []string `json:"input"` + // Query Query input, required for rerank task. + // Not required for other tasks. 
+ Query *string `json:"query,omitempty"` // TaskSettings Optional task settings TaskSettings json.RawMessage `json:"task_settings,omitempty"` } @@ -43,6 +47,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -88,6 +93,18 @@ func (s *Request) UnmarshalJSON(data []byte) error { } } + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Query", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Query = &o + case "task_settings": if err := dec.Decode(&s.TaskSettings); err != nil { return fmt.Errorf("%s | %w", "TaskSettings", err) diff --git a/typedapi/inference/inference/response.go b/typedapi/inference/inference/response.go index f4ea838f56..79b69d91e6 100644 --- a/typedapi/inference/inference/response.go +++ b/typedapi/inference/inference/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package inference @@ -26,8 +26,10 @@ import ( // Response holds the response body struct for the package inference // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/inference/InferenceResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/inference/InferenceResponse.ts#L22-L24 type Response struct { + Completion []types.CompletionResult `json:"completion,omitempty"` + Rerank []types.RankedDocument `json:"rerank,omitempty"` SparseEmbedding []types.SparseEmbeddingResult `json:"sparse_embedding,omitempty"` TextEmbedding []types.TextEmbeddingResult `json:"text_embedding,omitempty"` TextEmbeddingBytes []types.TextEmbeddingByteResult `json:"text_embedding_bytes,omitempty"` diff --git a/typedapi/inference/putmodel/put_model.go b/typedapi/inference/put/put.go similarity index 70% rename from typedapi/inference/putmodel/put_model.go rename to typedapi/inference/put/put.go index d2daf0ab2f..0eecc38bd6 100644 --- a/typedapi/inference/putmodel/put_model.go +++ b/typedapi/inference/put/put.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Configure a model for use in the Inference API -package putmodel +// Create an inference endpoint +package put import ( gobytes "bytes" @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -45,7 +46,7 @@ const ( // ErrBuildPath is returned in case of missing parameters within the build of the request. var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") -type PutModel struct { +type Put struct { transport elastictransport.Interface headers http.Header @@ -68,13 +69,13 @@ type PutModel struct { instrument elastictransport.Instrumentation } -// NewPutModel type alias for index. -type NewPutModel func(inferenceid string) *PutModel +// NewPut type alias for index. 
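A minimal sketch of how the new `query` body field and the `Input`/`Timeout` builders added above might be used together for a rerank call. The endpoint id, query text, and inputs are placeholders; the one-argument constructor returned by `NewInferenceFunc` and the usual `Do(ctx)` helper are assumed from the generated pattern shown in this diff.

```go
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/inference/inference"
)

// rerankExample sketches a rerank request against a hypothetical endpoint id,
// using the Query and Input builders added in this change.
func rerankExample(ctx context.Context, tp elastictransport.Interface) error {
	res, err := inference.NewInferenceFunc(tp)("my-rerank-endpoint").
		Query("which document mentions ingest pipelines?"). // required for the rerank task
		Input("doc one ...", "doc two ...").                // one or more text inputs
		Timeout("30s").                                     // optional request timeout
		Do(ctx)
	if err != nil {
		return err
	}
	// The response now carries task-specific result lists, e.g. Rerank.
	fmt.Printf("got %d ranked documents\n", len(res.Rerank))
	return nil
}
```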
+type NewPut func(inferenceid string) *Put -// NewPutModelFunc returns a new instance of PutModel with the provided transport. +// NewPutFunc returns a new instance of Put with the provided transport. // Used in the index of the library this allows to retrieve every apis in once place. -func NewPutModelFunc(tp elastictransport.Interface) NewPutModel { - return func(inferenceid string) *PutModel { +func NewPutFunc(tp elastictransport.Interface) NewPut { + return func(inferenceid string) *Put { n := New(tp) n._inferenceid(inferenceid) @@ -83,16 +84,18 @@ func NewPutModelFunc(tp elastictransport.Interface) NewPutModel { } } -// Configure a model for use in the Inference API +// Create an inference endpoint // // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-inference-api.html -func New(tp elastictransport.Interface) *PutModel { - r := &PutModel{ +func New(tp elastictransport.Interface) *Put { + r := &Put{ transport: tp, values: make(url.Values), headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -106,14 +109,14 @@ func New(tp elastictransport.Interface) *PutModel { // Raw takes a json payload as input which is then passed to the http.Request // If specified Raw takes precedence on Request method. -func (r *PutModel) Raw(raw io.Reader) *PutModel { +func (r *Put) Raw(raw io.Reader) *Put { r.raw = raw return r } // Request allows to set the request property with the appropriate payload. -func (r *PutModel) Request(req *Request) *PutModel { +func (r *Put) Request(req *Request) *Put { r.req = req return r @@ -121,7 +124,7 @@ func (r *PutModel) Request(req *Request) *PutModel { // HttpRequest returns the http.Request object built from the // given parameters. -func (r *PutModel) HttpRequest(ctx context.Context) (*http.Request, error) { +func (r *Put) HttpRequest(ctx context.Context) (*http.Request, error) { var path strings.Builder var method string var req *http.Request @@ -142,7 +145,7 @@ func (r *PutModel) HttpRequest(ctx context.Context) (*http.Request, error) { data, err := json.Marshal(r.req) if err != nil { - return nil, fmt.Errorf("could not serialise request for PutModel: %w", err) + return nil, fmt.Errorf("could not serialise request for Put: %w", err) } r.buf.Write(data) @@ -219,11 +222,11 @@ func (r *PutModel) HttpRequest(ctx context.Context) (*http.Request, error) { } // Perform runs the http.Request through the provided transport and returns an http.Response. 
-func (r PutModel) Perform(providedCtx context.Context) (*http.Response, error) { +func (r Put) Perform(providedCtx context.Context) (*http.Response, error) { var ctx context.Context if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { if r.spanStarted == false { - ctx := instrument.Start(providedCtx, "inference.put_model") + ctx := instrument.Start(providedCtx, "inference.put") defer instrument.Close(ctx) } } @@ -240,17 +243,17 @@ func (r PutModel) Perform(providedCtx context.Context) (*http.Response, error) { } if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.BeforeRequest(req, "inference.put_model") - if reader := instrument.RecordRequestBody(ctx, "inference.put_model", r.raw); reader != nil { + instrument.BeforeRequest(req, "inference.put") + if reader := instrument.RecordRequestBody(ctx, "inference.put", r.raw); reader != nil { req.Body = reader } } res, err := r.transport.Perform(req) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "inference.put_model") + instrument.AfterRequest(req, "elasticsearch", "inference.put") } if err != nil { - localErr := fmt.Errorf("an error happened during the PutModel query execution: %w", err) + localErr := fmt.Errorf("an error happened during the Put query execution: %w", err) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, localErr) } @@ -260,12 +263,12 @@ func (r PutModel) Perform(providedCtx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a putmodel.Response -func (r PutModel) Do(providedCtx context.Context) (*Response, error) { +// Do runs the request through the transport, handle the response and returns a put.Response +func (r Put) Do(providedCtx context.Context) (*Response, error) { var ctx context.Context r.spanStarted = true if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - ctx = instrument.Start(providedCtx, "inference.put_model") + ctx = instrument.Start(providedCtx, "inference.put") defer instrument.Close(ctx) } if ctx == nil { @@ -314,8 +317,8 @@ func (r PutModel) Do(providedCtx context.Context) (*Response, error) { return nil, errorResponse } -// Header set a key, value pair in the PutModel headers map. -func (r *PutModel) Header(key, value string) *PutModel { +// Header set a key, value pair in the Put headers map. +func (r *Put) Header(key, value string) *Put { r.headers.Set(key, value) return r @@ -323,7 +326,7 @@ func (r *PutModel) Header(key, value string) *PutModel { // TaskType The task type // API Name: tasktype -func (r *PutModel) TaskType(tasktype string) *PutModel { +func (r *Put) TaskType(tasktype string) *Put { r.paramSet |= tasktypeMask r.tasktype = tasktype @@ -332,16 +335,60 @@ func (r *PutModel) TaskType(tasktype string) *PutModel { // InferenceId The inference Id // API Name: inferenceid -func (r *PutModel) _inferenceid(inferenceid string) *PutModel { +func (r *Put) _inferenceid(inferenceid string) *Put { r.paramSet |= inferenceidMask r.inferenceid = inferenceid return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *Put) ErrorTrace(errortrace bool) *Put { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Put) FilterPath(filterpaths ...string) *Put { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Put) Human(human bool) *Put { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Put) Pretty(pretty bool) *Put { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Service The service type // API name: service -func (r *PutModel) Service(service string) *PutModel { +func (r *Put) Service(service string) *Put { r.req.Service = service @@ -350,15 +397,15 @@ func (r *PutModel) Service(service string) *PutModel { // ServiceSettings Settings specific to the service // API name: service_settings -func (r *PutModel) ServiceSettings(servicesettings json.RawMessage) *PutModel { +func (r *Put) ServiceSettings(servicesettings json.RawMessage) *Put { r.req.ServiceSettings = servicesettings return r } -// TaskSettings Task settings specific to the service and model +// TaskSettings Task settings specific to the service and task type // API name: task_settings -func (r *PutModel) TaskSettings(tasksettings json.RawMessage) *PutModel { +func (r *Put) TaskSettings(tasksettings json.RawMessage) *Put { r.req.TaskSettings = tasksettings return r diff --git a/typedapi/inference/putmodel/request.go b/typedapi/inference/put/request.go similarity index 66% rename from typedapi/inference/putmodel/request.go rename to typedapi/inference/put/request.go index 243da653e1..727e727cc2 100644 --- a/typedapi/inference/putmodel/request.go +++ b/typedapi/inference/put/request.go @@ -16,15 +16,22 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
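For the renamed `put` package, a hedged sketch of creating an inference endpoint with the `TaskType`/`Service`/`ServiceSettings` builders shown above. The endpoint id, service name, and settings are placeholders, and the single-argument constructor returned by `NewPutFunc` plus `Do(ctx)` follow the generated code in this diff.

```go
package examples

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/inference/put"
)

// createEndpointExample sketches inference.put with placeholder settings.
func createEndpointExample(ctx context.Context, tp elastictransport.Interface) error {
	res, err := put.NewPutFunc(tp)("my-embedding-endpoint").
		TaskType("text_embedding").                                 // optional task type path segment
		Service("my-service").                                      // service type (placeholder)
		ServiceSettings(json.RawMessage(`{"num_allocations": 1}`)). // service-specific settings (placeholder)
		Do(ctx)
	if err != nil {
		return err
	}
	// The response now reports inference_id instead of model_id.
	fmt.Println("created endpoint:", res.InferenceId)
	return nil
}
```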
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -package putmodel +package put import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) -// Request holds the request body struct for the package putmodel +// Request holds the request body struct for the package put // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/put_model/PutModelRequest.ts#L25-L44 -type Request = types.ModelConfig +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/put/PutRequest.ts#L25-L44 +type Request = types.InferenceEndpoint + +// NewRequest returns a Request +func NewRequest() *Request { + r := types.NewInferenceEndpoint() + + return r +} diff --git a/typedapi/inference/putmodel/response.go b/typedapi/inference/put/response.go similarity index 72% rename from typedapi/inference/putmodel/response.go rename to typedapi/inference/put/response.go index fb30b5b1a7..7500403609 100644 --- a/typedapi/inference/putmodel/response.go +++ b/typedapi/inference/put/response.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -package putmodel +package put import ( "encoding/json" @@ -26,20 +26,20 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/tasktype" ) -// Response holds the response body struct for the package putmodel +// Response holds the response body struct for the package put // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/put_model/PutModelResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/put/PutResponse.ts#L22-L24 type Response struct { - // ModelId The model Id - ModelId string `json:"model_id"` + // InferenceId The inference Id + InferenceId string `json:"inference_id"` // Service The service type Service string `json:"service"` // ServiceSettings Settings specific to the service ServiceSettings json.RawMessage `json:"service_settings"` - // TaskSettings Task settings specific to the service and model + // TaskSettings Task settings specific to the service and task type TaskSettings json.RawMessage `json:"task_settings"` - // TaskType The model's task type + // TaskType The task type TaskType tasktype.TaskType `json:"task_type"` } diff --git a/typedapi/ingest/deletepipeline/delete_pipeline.go b/typedapi/ingest/deletepipeline/delete_pipeline.go index e14b9c0772..90388750ff 100644 --- a/typedapi/ingest/deletepipeline/delete_pipeline.go +++ b/typedapi/ingest/deletepipeline/delete_pipeline.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes a pipeline. +// Deletes one or more existing ingest pipeline. 
package deletepipeline import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +76,7 @@ func NewDeletePipelineFunc(tp elastictransport.Interface) NewDeletePipeline { } } -// Deletes a pipeline. +// Deletes one or more existing ingest pipeline. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-pipeline-api.html func New(tp elastictransport.Interface) *DeletePipeline { @@ -262,7 +262,7 @@ func (r DeletePipeline) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -319,3 +319,47 @@ func (r *DeletePipeline) Timeout(duration string) *DeletePipeline { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeletePipeline) ErrorTrace(errortrace bool) *DeletePipeline { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeletePipeline) FilterPath(filterpaths ...string) *DeletePipeline { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeletePipeline) Human(human bool) *DeletePipeline { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeletePipeline) Pretty(pretty bool) *DeletePipeline { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ingest/deletepipeline/response.go b/typedapi/ingest/deletepipeline/response.go index b9849fbcf0..fe5bab6361 100644 --- a/typedapi/ingest/deletepipeline/response.go +++ b/typedapi/ingest/deletepipeline/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletepipeline // Response holds the response body struct for the package deletepipeline // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/delete_pipeline/DeletePipelineResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/delete_pipeline/DeletePipelineResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/ingest/geoipstats/geo_ip_stats.go b/typedapi/ingest/geoipstats/geo_ip_stats.go index fcb79d946f..cf5f8038f1 100644 --- a/typedapi/ingest/geoipstats/geo_ip_stats.go +++ b/typedapi/ingest/geoipstats/geo_ip_stats.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns statistical information about geoip databases +// Gets download statistics for GeoIP2 databases used with the geoip processor. package geoipstats import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -68,7 +68,7 @@ func NewGeoIpStatsFunc(tp elastictransport.Interface) NewGeoIpStats { } } -// Returns statistical information about geoip databases +// Gets download statistics for GeoIP2 databases used with the geoip processor. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/geoip-processor.html func New(tp elastictransport.Interface) *GeoIpStats { @@ -250,7 +250,7 @@ func (r GeoIpStats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -277,3 +277,47 @@ func (r *GeoIpStats) Header(key, value string) *GeoIpStats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GeoIpStats) ErrorTrace(errortrace bool) *GeoIpStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GeoIpStats) FilterPath(filterpaths ...string) *GeoIpStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GeoIpStats) Human(human bool) *GeoIpStats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GeoIpStats) Pretty(pretty bool) *GeoIpStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ingest/geoipstats/response.go b/typedapi/ingest/geoipstats/response.go index 881673fc89..827b683bbf 100644 --- a/typedapi/ingest/geoipstats/response.go +++ b/typedapi/ingest/geoipstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
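To illustrate the common query-parameter helpers (`error_trace`, `filter_path`, `human`, `pretty`) being added across these endpoints, a hedged sketch using the geoip stats API. The `filter_path` value is illustrative only, and the no-argument constructor returned by `NewGeoIpStatsFunc` and the `Do(ctx)` helper are assumed from the generated pattern.

```go
package examples

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ingest/geoipstats"
)

// geoipStatsExample sketches the newly exposed common query parameters.
func geoipStatsExample(ctx context.Context, tp elastictransport.Interface) error {
	res, err := geoipstats.NewGeoIpStatsFunc(tp)().
		FilterPath("nodes"). // trim the response to the per-node section (illustrative)
		Human(true).         // human-readable durations and sizes
		Pretty(true).        // pretty-print the JSON, for debugging
		Do(ctx)
	if err != nil {
		return err
	}
	_ = res // Response.Nodes holds the downloaded GeoIP2 databases per node.
	return nil
}
```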
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package geoipstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package geoipstats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/geo_ip_stats/IngestGeoIpStatsResponse.ts#L24-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/geo_ip_stats/IngestGeoIpStatsResponse.ts#L24-L31 type Response struct { // Nodes Downloaded GeoIP2 databases for each node. diff --git a/typedapi/ingest/getpipeline/get_pipeline.go b/typedapi/ingest/getpipeline/get_pipeline.go index 1170031902..7fb296cb43 100644 --- a/typedapi/ingest/getpipeline/get_pipeline.go +++ b/typedapi/ingest/getpipeline/get_pipeline.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns a pipeline. +// Returns information about one or more ingest pipelines. +// This API returns a local reference of the pipeline. package getpipeline import ( @@ -27,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -75,7 +75,8 @@ func NewGetPipelineFunc(tp elastictransport.Interface) NewGetPipeline { } } -// Returns a pipeline. +// Returns information about one or more ingest pipelines. +// This API returns a local reference of the pipeline. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-pipeline-api.html func New(tp elastictransport.Interface) *GetPipeline { @@ -268,7 +269,7 @@ func (r GetPipeline) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -324,3 +325,47 @@ func (r *GetPipeline) Summary(summary bool) *GetPipeline { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetPipeline) ErrorTrace(errortrace bool) *GetPipeline { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetPipeline) FilterPath(filterpaths ...string) *GetPipeline { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetPipeline) Human(human bool) *GetPipeline { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". 
Only use +// this option for debugging only. +// API name: pretty +func (r *GetPipeline) Pretty(pretty bool) *GetPipeline { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ingest/getpipeline/response.go b/typedapi/ingest/getpipeline/response.go index d7552dd232..ee53c07736 100644 --- a/typedapi/ingest/getpipeline/response.go +++ b/typedapi/ingest/getpipeline/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getpipeline @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/get_pipeline/GetPipelineResponse.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/get_pipeline/GetPipelineResponse.ts#L23-L26 type Response map[string]types.IngestPipeline diff --git a/typedapi/ingest/processorgrok/processor_grok.go b/typedapi/ingest/processorgrok/processor_grok.go index 6bce00e2bb..c963570085 100644 --- a/typedapi/ingest/processorgrok/processor_grok.go +++ b/typedapi/ingest/processorgrok/processor_grok.go @@ -16,9 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns a list of the built-in patterns. +// Extracts structured fields out of a single text field within a document. +// You choose which field to extract matched fields from, as well as the grok +// pattern you expect will match. +// A grok pattern is like a regular expression that supports aliased expressions +// that can be reused. package processorgrok import ( @@ -27,9 +31,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -68,7 +72,11 @@ func NewProcessorGrokFunc(tp elastictransport.Interface) NewProcessorGrok { } } -// Returns a list of the built-in patterns. +// Extracts structured fields out of a single text field within a document. +// You choose which field to extract matched fields from, as well as the grok +// pattern you expect will match. +// A grok pattern is like a regular expression that supports aliased expressions +// that can be reused. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/grok-processor.html func New(tp elastictransport.Interface) *ProcessorGrok { @@ -250,7 +258,7 @@ func (r ProcessorGrok) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -277,3 +285,47 @@ func (r *ProcessorGrok) Header(key, value string) *ProcessorGrok { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *ProcessorGrok) ErrorTrace(errortrace bool) *ProcessorGrok { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ProcessorGrok) FilterPath(filterpaths ...string) *ProcessorGrok { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ProcessorGrok) Human(human bool) *ProcessorGrok { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ProcessorGrok) Pretty(pretty bool) *ProcessorGrok { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ingest/processorgrok/response.go b/typedapi/ingest/processorgrok/response.go index 078287b08c..88d4948f8b 100644 --- a/typedapi/ingest/processorgrok/response.go +++ b/typedapi/ingest/processorgrok/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package processorgrok // Response holds the response body struct for the package processorgrok // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/processor_grok/GrokProcessorPatternsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/processor_grok/GrokProcessorPatternsResponse.ts#L22-L24 type Response struct { Patterns map[string]string `json:"patterns"` } diff --git a/typedapi/ingest/putpipeline/put_pipeline.go b/typedapi/ingest/putpipeline/put_pipeline.go index 131f7fee6a..e4981cf96f 100644 --- a/typedapi/ingest/putpipeline/put_pipeline.go +++ b/typedapi/ingest/putpipeline/put_pipeline.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates or updates a pipeline. +// Creates or updates an ingest pipeline. +// Changes made using this API take effect immediately. package putpipeline import ( @@ -30,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +82,8 @@ func NewPutPipelineFunc(tp elastictransport.Interface) NewPutPipeline { } } -// Creates or updates a pipeline. +// Creates or updates an ingest pipeline. +// Changes made using this API take effect immediately. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/ingest.html func New(tp elastictransport.Interface) *PutPipeline { @@ -340,6 +343,50 @@ func (r *PutPipeline) IfVersion(versionnumber string) *PutPipeline { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutPipeline) ErrorTrace(errortrace bool) *PutPipeline { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutPipeline) FilterPath(filterpaths ...string) *PutPipeline { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutPipeline) Human(human bool) *PutPipeline { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutPipeline) Pretty(pretty bool) *PutPipeline { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Description Description of the ingest pipeline. // API name: description func (r *PutPipeline) Description(description string) *PutPipeline { diff --git a/typedapi/ingest/putpipeline/request.go b/typedapi/ingest/putpipeline/request.go index e3f64f1e10..f41254b43d 100644 --- a/typedapi/ingest/putpipeline/request.go +++ b/typedapi/ingest/putpipeline/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putpipeline @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/put_pipeline/PutPipelineRequest.ts#L25-L77 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/put_pipeline/PutPipelineRequest.ts#L25-L77 type Request struct { // Description Description of the ingest pipeline. @@ -60,6 +60,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ingest/putpipeline/response.go b/typedapi/ingest/putpipeline/response.go index a590d2f70c..3dd2082190 100644 --- a/typedapi/ingest/putpipeline/response.go +++ b/typedapi/ingest/putpipeline/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
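A hedged sketch of creating a pipeline with the typed `putpipeline` API described above. The pipeline id and body are placeholders; the single-argument constructor is assumed from the `NewPutPipelineFunc` pattern, and the `Raw` helper is assumed to be available here as it is on the other body-accepting endpoints in this change.

```go
package examples

import (
	"context"
	"strings"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ingest/putpipeline"
)

// putPipelineExample sketches ingest.put_pipeline with a raw JSON body.
func putPipelineExample(ctx context.Context, tp elastictransport.Interface) error {
	body := `{
	  "description": "lowercase the message field",
	  "processors": [ { "lowercase": { "field": "message" } } ]
	}`
	res, err := putpipeline.NewPutPipelineFunc(tp)("my-pipeline").
		Raw(strings.NewReader(body)). // assumed Raw helper, bypasses the typed Request
		Pretty(true).
		Do(ctx)
	if err != nil {
		return err
	}
	_ = res.Acknowledged // true on success; changes take effect immediately
	return nil
}
```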
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putpipeline // Response holds the response body struct for the package putpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/put_pipeline/PutPipelineResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/put_pipeline/PutPipelineResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ingest/simulate/request.go b/typedapi/ingest/simulate/request.go index 4128b2267d..74bccb98c2 100644 --- a/typedapi/ingest/simulate/request.go +++ b/typedapi/ingest/simulate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package simulate @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package simulate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/simulate/SimulatePipelineRequest.ts#L25-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/simulate/SimulatePipelineRequest.ts#L25-L57 type Request struct { // Docs Sample documents to test in the pipeline. @@ -45,6 +45,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ingest/simulate/response.go b/typedapi/ingest/simulate/response.go index d5375b50d6..dc27fac467 100644 --- a/typedapi/ingest/simulate/response.go +++ b/typedapi/ingest/simulate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package simulate @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package simulate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/simulate/SimulatePipelineResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/simulate/SimulatePipelineResponse.ts#L22-L24 type Response struct { Docs []types.PipelineSimulation `json:"docs"` } diff --git a/typedapi/ingest/simulate/simulate.go b/typedapi/ingest/simulate/simulate.go index 3297f014fc..2b6ea2117c 100644 --- a/typedapi/ingest/simulate/simulate.go +++ b/typedapi/ingest/simulate/simulate.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
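For the simulate API covered next, a hedged sketch of running a sample document through an inline pipeline definition. The body is a placeholder, the no-argument constructor and `Do(ctx)` follow the generated pattern, and the `Raw` helper is again an assumption based on the other body endpoints in this diff.

```go
package examples

import (
	"context"
	"fmt"
	"strings"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ingest/simulate"
)

// simulatePipelineExample sketches ingest.simulate with an inline pipeline.
func simulatePipelineExample(ctx context.Context, tp elastictransport.Interface) error {
	body := `{
	  "pipeline": { "processors": [ { "lowercase": { "field": "message" } } ] },
	  "docs": [ { "_source": { "message": "HELLO" } } ]
	}`
	res, err := simulate.NewSimulateFunc(tp)().
		Raw(strings.NewReader(body)). // assumed Raw helper, as on other body endpoints
		Verbose(true).                // include per-processor results
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("simulated %d documents\n", len(res.Docs))
	return nil
}
```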
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Allows to simulate a pipeline with example documents. +// Executes an ingest pipeline against a set of provided documents. package simulate import ( @@ -79,7 +79,7 @@ func NewSimulateFunc(tp elastictransport.Interface) NewSimulate { } } -// Allows to simulate a pipeline with example documents. +// Executes an ingest pipeline against a set of provided documents. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/simulate-pipeline-api.html func New(tp elastictransport.Interface) *Simulate { @@ -335,6 +335,50 @@ func (r *Simulate) Verbose(verbose bool) *Simulate { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Simulate) ErrorTrace(errortrace bool) *Simulate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Simulate) FilterPath(filterpaths ...string) *Simulate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Simulate) Human(human bool) *Simulate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Simulate) Pretty(pretty bool) *Simulate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Docs Sample documents to test in the pipeline. // API name: docs func (r *Simulate) Docs(docs ...types.Document) *Simulate { diff --git a/typedapi/license/delete/delete.go b/typedapi/license/delete/delete.go index 85bcc35375..6c6a708187 100644 --- a/typedapi/license/delete/delete.go +++ b/typedapi/license/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes licensing information for the cluster package delete @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -246,7 +246,7 @@ func (r Delete) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -273,3 +273,47 @@ func (r *Delete) Header(key, value string) *Delete { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *Delete) ErrorTrace(errortrace bool) *Delete { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Delete) FilterPath(filterpaths ...string) *Delete { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Delete) Human(human bool) *Delete { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Delete) Pretty(pretty bool) *Delete { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/license/delete/response.go b/typedapi/license/delete/response.go index f062703ed5..ec262eaed4 100644 --- a/typedapi/license/delete/response.go +++ b/typedapi/license/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/delete/DeleteLicenseResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/delete/DeleteLicenseResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/license/get/get.go b/typedapi/license/get/get.go index 34aff0a995..ce623658c6 100644 --- a/typedapi/license/get/get.go +++ b/typedapi/license/get/get.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Retrieves licensing information for the cluster +// This API returns information about the type of license, when it was issued, +// and when it expires, for example. +// For more information about the different types of licenses, see +// https://www.elastic.co/subscriptions. package get import ( @@ -27,7 +30,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -69,7 +71,10 @@ func NewGetFunc(tp elastictransport.Interface) NewGet { } } -// Retrieves licensing information for the cluster +// This API returns information about the type of license, when it was issued, +// and when it expires, for example. 
+// For more information about the different types of licenses, see +// https://www.elastic.co/subscriptions. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-license.html func New(tp elastictransport.Interface) *Get { @@ -247,7 +252,7 @@ func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -294,3 +299,47 @@ func (r *Get) Local(local bool) *Get { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Get) ErrorTrace(errortrace bool) *Get { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Get) FilterPath(filterpaths ...string) *Get { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Get) Human(human bool) *Get { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Get) Pretty(pretty bool) *Get { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/license/get/response.go b/typedapi/license/get/response.go index fd107c2416..bac79b8f5a 100644 --- a/typedapi/license/get/response.go +++ b/typedapi/license/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/get/GetLicenseResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/get/GetLicenseResponse.ts#L22-L24 type Response struct { License types.LicenseInformation `json:"license"` } diff --git a/typedapi/license/getbasicstatus/get_basic_status.go b/typedapi/license/getbasicstatus/get_basic_status.go index 2d7bb7d424..2badf90766 100644 --- a/typedapi/license/getbasicstatus/get_basic_status.go +++ b/typedapi/license/getbasicstatus/get_basic_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
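A short, hedged sketch of the license get call documented above. The no-argument constructor and `Do(ctx)` follow the generated pattern in this diff, and the `Local` flag shown above is included only for illustration.

```go
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/license/get"
)

// licenseGetExample sketches reading the cluster license information.
func licenseGetExample(ctx context.Context, tp elastictransport.Interface) error {
	res, err := get.NewGetFunc(tp)().
		Local(true). // query the local node (flag shown in this diff)
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("license: %+v\n", res.License)
	return nil
}
```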
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves information about the status of the basic license. package getbasicstatus @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r GetBasicStatus) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *GetBasicStatus) Header(key, value string) *GetBasicStatus { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetBasicStatus) ErrorTrace(errortrace bool) *GetBasicStatus { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetBasicStatus) FilterPath(filterpaths ...string) *GetBasicStatus { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetBasicStatus) Human(human bool) *GetBasicStatus { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetBasicStatus) Pretty(pretty bool) *GetBasicStatus { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/license/getbasicstatus/response.go b/typedapi/license/getbasicstatus/response.go index 5b162b8038..9f4061d81e 100644 --- a/typedapi/license/getbasicstatus/response.go +++ b/typedapi/license/getbasicstatus/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getbasicstatus // Response holds the response body struct for the package getbasicstatus // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/get_basic_status/GetBasicLicenseStatusResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/get_basic_status/GetBasicLicenseStatusResponse.ts#L20-L22 type Response struct { EligibleToStartBasic bool `json:"eligible_to_start_basic"` } diff --git a/typedapi/license/gettrialstatus/get_trial_status.go b/typedapi/license/gettrialstatus/get_trial_status.go index 00f7eab8a6..49f68addd7 100644 --- a/typedapi/license/gettrialstatus/get_trial_status.go +++ b/typedapi/license/gettrialstatus/get_trial_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves information about the status of the trial license. package gettrialstatus @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r GetTrialStatus) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *GetTrialStatus) Header(key, value string) *GetTrialStatus { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetTrialStatus) ErrorTrace(errortrace bool) *GetTrialStatus { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetTrialStatus) FilterPath(filterpaths ...string) *GetTrialStatus { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetTrialStatus) Human(human bool) *GetTrialStatus { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *GetTrialStatus) Pretty(pretty bool) *GetTrialStatus { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/license/gettrialstatus/response.go b/typedapi/license/gettrialstatus/response.go index 5fa1f5bd0b..baef7edf21 100644 --- a/typedapi/license/gettrialstatus/response.go +++ b/typedapi/license/gettrialstatus/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package gettrialstatus // Response holds the response body struct for the package gettrialstatus // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/get_trial_status/GetTrialLicenseStatusResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/get_trial_status/GetTrialLicenseStatusResponse.ts#L20-L22 type Response struct { EligibleToStartTrial bool `json:"eligible_to_start_trial"` } diff --git a/typedapi/license/post/post.go b/typedapi/license/post/post.go index 188967663e..26729f5d93 100644 --- a/typedapi/license/post/post.go +++ b/typedapi/license/post/post.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Updates the license for the cluster. package post @@ -298,6 +298,50 @@ func (r *Post) Acknowledge(acknowledge bool) *Post { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Post) ErrorTrace(errortrace bool) *Post { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Post) FilterPath(filterpaths ...string) *Post { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Post) Human(human bool) *Post { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *Post) Pretty(pretty bool) *Post { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: license func (r *Post) License(license *types.License) *Post { diff --git a/typedapi/license/post/request.go b/typedapi/license/post/request.go index 412a956ff7..7e4ad62ab8 100644 --- a/typedapi/license/post/request.go +++ b/typedapi/license/post/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package post @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package post // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/post/PostLicenseRequest.ts#L23-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/post/PostLicenseRequest.ts#L23-L43 type Request struct { License *types.License `json:"license,omitempty"` // Licenses A sequence of one or more JSON documents containing the license information. @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/license/post/response.go b/typedapi/license/post/response.go index 2c754ad1b7..ffe32c5dfa 100644 --- a/typedapi/license/post/response.go +++ b/typedapi/license/post/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package post @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package post // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/post/PostLicenseResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/post/PostLicenseResponse.ts#L23-L29 type Response struct { Acknowledge *types.Acknowledgement `json:"acknowledge,omitempty"` Acknowledged bool `json:"acknowledged"` diff --git a/typedapi/license/poststartbasic/post_start_basic.go b/typedapi/license/poststartbasic/post_start_basic.go index 5edc492f97..87ebf70287 100644 --- a/typedapi/license/poststartbasic/post_start_basic.go +++ b/typedapi/license/poststartbasic/post_start_basic.go @@ -16,9 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Starts an indefinite basic license. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// The start basic API enables you to initiate an indefinite basic license, +// which gives access to all the basic features. If the basic license does not +// support all of the features that are available with your current license, +// however, you are notified in the response. You must then re-submit the API +// request with the acknowledge parameter set to true. 
+// To check the status of your basic license, use the following API: [Get basic +// status](https://www.elastic.co/guide/en/elasticsearch/reference/current/get-basic-status.html). package poststartbasic import ( @@ -27,7 +33,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -69,7 +74,13 @@ func NewPostStartBasicFunc(tp elastictransport.Interface) NewPostStartBasic { } } -// Starts an indefinite basic license. +// The start basic API enables you to initiate an indefinite basic license, +// which gives access to all the basic features. If the basic license does not +// support all of the features that are available with your current license, +// however, you are notified in the response. You must then re-submit the API +// request with the acknowledge parameter set to true. +// To check the status of your basic license, use the following API: [Get basic +// status](https://www.elastic.co/guide/en/elasticsearch/reference/current/get-basic-status.html). // // https://www.elastic.co/guide/en/elasticsearch/reference/current/start-basic.html func New(tp elastictransport.Interface) *PostStartBasic { @@ -249,7 +260,7 @@ func (r PostStartBasic) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -284,3 +295,47 @@ func (r *PostStartBasic) Acknowledge(acknowledge bool) *PostStartBasic { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PostStartBasic) ErrorTrace(errortrace bool) *PostStartBasic { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PostStartBasic) FilterPath(filterpaths ...string) *PostStartBasic { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PostStartBasic) Human(human bool) *PostStartBasic { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PostStartBasic) Pretty(pretty bool) *PostStartBasic { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/license/poststartbasic/response.go b/typedapi/license/poststartbasic/response.go index a1a1ef5170..95623d6366 100644 --- a/typedapi/license/poststartbasic/response.go +++ b/typedapi/license/poststartbasic/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
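Given the acknowledge semantics described in the new start-basic comment (a first call may be rejected until it is re-submitted with acknowledge set to true), here is a brief sketch of driving that endpoint up front with acknowledgement enabled; the transport parameter and the Do(ctx) call are assumed as in the other typed builders:

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/license/poststartbasic"
)

// startBasic switches the cluster to a basic license, acknowledging up front
// that features not covered by basic may be turned off.
func startBasic(ctx context.Context, tp elastictransport.Interface) error {
	res, err := poststartbasic.New(tp).
		Acknowledge(true). // accept the feature changes reported by the API
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("acknowledged:", res.Acknowledged)
	return nil
}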
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package poststartbasic @@ -33,7 +33,7 @@ import ( // Response holds the response body struct for the package poststartbasic // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/post_start_basic/StartBasicLicenseResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/post_start_basic/StartBasicLicenseResponse.ts#L23-L31 type Response struct { Acknowledge map[string][]string `json:"acknowledge,omitempty"` Acknowledged bool `json:"acknowledged"` @@ -90,7 +90,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "acknowledged": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "basic_was_started": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/license/poststarttrial/post_start_trial.go b/typedapi/license/poststarttrial/post_start_trial.go index e55201ef14..971ac0e570 100644 --- a/typedapi/license/poststarttrial/post_start_trial.go +++ b/typedapi/license/poststarttrial/post_start_trial.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// starts a limited time trial license. +// The start trial API enables you to start a 30-day trial, which gives access +// to all subscription features. package poststarttrial import ( @@ -27,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -69,7 +69,8 @@ func NewPostStartTrialFunc(tp elastictransport.Interface) NewPostStartTrial { } } -// starts a limited time trial license. +// The start trial API enables you to start a 30-day trial, which gives access +// to all subscription features. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/start-trial.html func New(tp elastictransport.Interface) *PostStartTrial { @@ -249,7 +250,7 @@ func (r PostStartTrial) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -291,3 +292,47 @@ func (r *PostStartTrial) TypeQueryString(typequerystring string) *PostStartTrial return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PostStartTrial) ErrorTrace(errortrace bool) *PostStartTrial { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *PostStartTrial) FilterPath(filterpaths ...string) *PostStartTrial { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PostStartTrial) Human(human bool) *PostStartTrial { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PostStartTrial) Pretty(pretty bool) *PostStartTrial { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/license/poststarttrial/response.go b/typedapi/license/poststarttrial/response.go index 3de8c2d42b..bbe00e0b4c 100644 --- a/typedapi/license/poststarttrial/response.go +++ b/typedapi/license/poststarttrial/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package poststarttrial @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package poststarttrial // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/post_start_trial/StartTrialLicenseResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/post_start_trial/StartTrialLicenseResponse.ts#L22-L29 type Response struct { Acknowledged bool `json:"acknowledged"` ErrorMessage *string `json:"error_message,omitempty"` diff --git a/typedapi/logstash/deletepipeline/delete_pipeline.go b/typedapi/logstash/deletepipeline/delete_pipeline.go index bfe47c159d..347e23c9a0 100644 --- a/typedapi/logstash/deletepipeline/delete_pipeline.go +++ b/typedapi/logstash/deletepipeline/delete_pipeline.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes Logstash Pipelines used by Central Management +// Deletes a pipeline used for Logstash Central Management. package deletepipeline import ( @@ -26,9 +26,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -74,7 +74,7 @@ func NewDeletePipelineFunc(tp elastictransport.Interface) NewDeletePipeline { } } -// Deletes Logstash Pipelines used by Central Management +// Deletes a pipeline used for Logstash Central Management. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-api-delete-pipeline.html func New(tp elastictransport.Interface) *DeletePipeline { @@ -211,7 +211,7 @@ func (r DeletePipeline) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -247,3 +247,47 @@ func (r *DeletePipeline) _id(id string) *DeletePipeline { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeletePipeline) ErrorTrace(errortrace bool) *DeletePipeline { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeletePipeline) FilterPath(filterpaths ...string) *DeletePipeline { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeletePipeline) Human(human bool) *DeletePipeline { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeletePipeline) Pretty(pretty bool) *DeletePipeline { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/logstash/getpipeline/get_pipeline.go b/typedapi/logstash/getpipeline/get_pipeline.go index 3f0b375768..b01b6b19c0 100644 --- a/typedapi/logstash/getpipeline/get_pipeline.go +++ b/typedapi/logstash/getpipeline/get_pipeline.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Retrieves Logstash Pipelines used by Central Management +// Retrieves pipelines used for Logstash Central Management. package getpipeline import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -74,7 +74,7 @@ func NewGetPipelineFunc(tp elastictransport.Interface) NewGetPipeline { } } -// Retrieves Logstash Pipelines used by Central Management +// Retrieves pipelines used for Logstash Central Management. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-api-get-pipeline.html func New(tp elastictransport.Interface) *GetPipeline { @@ -267,7 +267,7 @@ func (r GetPipeline) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -303,3 +303,47 @@ func (r *GetPipeline) Id(id string) *GetPipeline { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetPipeline) ErrorTrace(errortrace bool) *GetPipeline { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetPipeline) FilterPath(filterpaths ...string) *GetPipeline { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetPipeline) Human(human bool) *GetPipeline { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetPipeline) Pretty(pretty bool) *GetPipeline { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/logstash/getpipeline/response.go b/typedapi/logstash/getpipeline/response.go index 717e7b5518..0490ff532b 100644 --- a/typedapi/logstash/getpipeline/response.go +++ b/typedapi/logstash/getpipeline/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getpipeline @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/logstash/get_pipeline/LogstashGetPipelineResponse.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/logstash/get_pipeline/LogstashGetPipelineResponse.ts#L24-L27 type Response map[string]types.LogstashPipeline diff --git a/typedapi/logstash/putpipeline/put_pipeline.go b/typedapi/logstash/putpipeline/put_pipeline.go index 77ed854acc..2117cbb4e4 100644 --- a/typedapi/logstash/putpipeline/put_pipeline.go +++ b/typedapi/logstash/putpipeline/put_pipeline.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
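The reworded Logstash descriptions pair with an unchanged calling pattern. A sketch of listing centrally managed pipelines through the typed builder, assuming the transport is obtained elsewhere and that Do(ctx) returns the map-shaped Response declared in getpipeline/response.go:

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/logstash/getpipeline"
)

// listPipelines fetches one or more pipelines by ID (comma-separated) and
// prints their identifiers; the response is keyed by pipeline ID.
func listPipelines(ctx context.Context, tp elastictransport.Interface, id string) error {
	res, err := getpipeline.New(tp).
		Id(id). // omit Id to fetch every pipeline
		Do(ctx)
	if err != nil {
		return err
	}
	for pipelineID := range res {
		fmt.Println("pipeline:", pipelineID)
	}
	return nil
}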
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Adds and updates Logstash Pipelines used for Central Management +// Creates or updates a pipeline used for Logstash Central Management. package putpipeline import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +81,7 @@ func NewPutPipelineFunc(tp elastictransport.Interface) NewPutPipeline { } } -// Adds and updates Logstash Pipelines used for Central Management +// Creates or updates a pipeline used for Logstash Central Management. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/logstash-api-put-pipeline.html func New(tp elastictransport.Interface) *PutPipeline { @@ -90,6 +91,8 @@ func New(tp elastictransport.Interface) *PutPipeline { headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -258,6 +261,50 @@ func (r *PutPipeline) _id(id string) *PutPipeline { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutPipeline) ErrorTrace(errortrace bool) *PutPipeline { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutPipeline) FilterPath(filterpaths ...string) *PutPipeline { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutPipeline) Human(human bool) *PutPipeline { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutPipeline) Pretty(pretty bool) *PutPipeline { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Description Description of the pipeline. // This description is not used by Elasticsearch or Logstash. // API name: description diff --git a/typedapi/logstash/putpipeline/request.go b/typedapi/logstash/putpipeline/request.go index 7418f6b824..2b5c322fea 100644 --- a/typedapi/logstash/putpipeline/request.go +++ b/typedapi/logstash/putpipeline/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putpipeline @@ -26,5 +26,12 @@ import ( // Request holds the request body struct for the package putpipeline // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/logstash/put_pipeline/LogstashPutPipelineRequest.ts#L24-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/logstash/put_pipeline/LogstashPutPipelineRequest.ts#L24-L39 type Request = types.LogstashPipeline + +// NewRequest returns a Request +func NewRequest() *Request { + r := types.NewLogstashPipeline() + + return r +} diff --git a/typedapi/migration/deprecations/deprecations.go b/typedapi/migration/deprecations/deprecations.go index 640bb10ee9..c4706d5ddf 100644 --- a/typedapi/migration/deprecations/deprecations.go +++ b/typedapi/migration/deprecations/deprecations.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves information about different cluster, node, and index level settings // that use deprecated features that will be removed or changed in the next @@ -29,9 +29,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -271,7 +271,7 @@ func (r Deprecations) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -308,3 +308,47 @@ func (r *Deprecations) Index(index string) *Deprecations { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Deprecations) ErrorTrace(errortrace bool) *Deprecations { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Deprecations) FilterPath(filterpaths ...string) *Deprecations { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Deprecations) Human(human bool) *Deprecations { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *Deprecations) Pretty(pretty bool) *Deprecations { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/migration/deprecations/response.go b/typedapi/migration/deprecations/response.go index 230f7e4878..363d426211 100644 --- a/typedapi/migration/deprecations/response.go +++ b/typedapi/migration/deprecations/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deprecations @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deprecations // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/migration/deprecations/DeprecationInfoResponse.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/migration/deprecations/DeprecationInfoResponse.ts#L23-L30 type Response struct { ClusterSettings []types.Deprecation `json:"cluster_settings"` IndexSettings map[string][]types.Deprecation `json:"index_settings"` diff --git a/typedapi/migration/getfeatureupgradestatus/get_feature_upgrade_status.go b/typedapi/migration/getfeatureupgradestatus/get_feature_upgrade_status.go index 02de91d165..6a2c25c9c5 100644 --- a/typedapi/migration/getfeatureupgradestatus/get_feature_upgrade_status.go +++ b/typedapi/migration/getfeatureupgradestatus/get_feature_upgrade_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Find out whether system features need to be upgraded or not package getfeatureupgradestatus @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r GetFeatureUpgradeStatus) IsSuccess(providedCtx context.Context) (bool, e if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *GetFeatureUpgradeStatus) Header(key, value string) *GetFeatureUpgradeSt return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetFeatureUpgradeStatus) ErrorTrace(errortrace bool) *GetFeatureUpgradeStatus { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetFeatureUpgradeStatus) FilterPath(filterpaths ...string) *GetFeatureUpgradeStatus { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
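Since the deprecations endpoint is typically consulted before a major upgrade, a short sketch of requesting cluster- and index-level deprecation warnings follows; the transport argument and Do(ctx) are assumed as in the earlier examples, and the field names come from the Response struct in this diff:

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/migration/deprecations"
)

// checkDeprecations reports how many deprecated settings are in use, scoped
// to a single index when one is given.
func checkDeprecations(ctx context.Context, tp elastictransport.Interface, index string) error {
	req := deprecations.New(tp)
	if index != "" {
		req = req.Index(index) // restrict the check to one index
	}
	res, err := req.Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("cluster-level deprecations:", len(res.ClusterSettings))
	for idx, issues := range res.IndexSettings {
		fmt.Printf("index %s: %d deprecation(s)\n", idx, len(issues))
	}
	return nil
}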
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetFeatureUpgradeStatus) Human(human bool) *GetFeatureUpgradeStatus { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetFeatureUpgradeStatus) Pretty(pretty bool) *GetFeatureUpgradeStatus { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/migration/getfeatureupgradestatus/response.go b/typedapi/migration/getfeatureupgradestatus/response.go index 1971f4f993..a6d1f92516 100644 --- a/typedapi/migration/getfeatureupgradestatus/response.go +++ b/typedapi/migration/getfeatureupgradestatus/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getfeatureupgradestatus @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package getfeatureupgradestatus // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L23-L28 type Response struct { Features []types.GetMigrationFeature `json:"features"` MigrationStatus migrationstatus.MigrationStatus `json:"migration_status"` diff --git a/typedapi/migration/postfeatureupgrade/post_feature_upgrade.go b/typedapi/migration/postfeatureupgrade/post_feature_upgrade.go index 4383c2b08b..0432ce918f 100644 --- a/typedapi/migration/postfeatureupgrade/post_feature_upgrade.go +++ b/typedapi/migration/postfeatureupgrade/post_feature_upgrade.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Begin upgrades for system features package postfeatureupgrade @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r PostFeatureUpgrade) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *PostFeatureUpgrade) Header(key, value string) *PostFeatureUpgrade { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *PostFeatureUpgrade) ErrorTrace(errortrace bool) *PostFeatureUpgrade { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PostFeatureUpgrade) FilterPath(filterpaths ...string) *PostFeatureUpgrade { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PostFeatureUpgrade) Human(human bool) *PostFeatureUpgrade { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PostFeatureUpgrade) Pretty(pretty bool) *PostFeatureUpgrade { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/migration/postfeatureupgrade/response.go b/typedapi/migration/postfeatureupgrade/response.go index 2b67ff51a2..396c5b11d2 100644 --- a/typedapi/migration/postfeatureupgrade/response.go +++ b/typedapi/migration/postfeatureupgrade/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package postfeatureupgrade @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package postfeatureupgrade // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/migration/post_feature_upgrade/PostFeatureUpgradeResponse.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/migration/post_feature_upgrade/PostFeatureUpgradeResponse.ts#L20-L25 type Response struct { Accepted bool `json:"accepted"` Features []types.PostMigrationFeature `json:"features"` diff --git a/typedapi/ml/cleartrainedmodeldeploymentcache/clear_trained_model_deployment_cache.go b/typedapi/ml/cleartrainedmodeldeploymentcache/clear_trained_model_deployment_cache.go index d4b15ed341..f1f5016e9a 100644 --- a/typedapi/ml/cleartrainedmodeldeploymentcache/clear_trained_model_deployment_cache.go +++ b/typedapi/ml/cleartrainedmodeldeploymentcache/clear_trained_model_deployment_cache.go @@ -16,9 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Clear the cached results from a trained model deployment +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Clears a trained model deployment cache on all nodes where the trained model +// is assigned. +// A trained model deployment may have an inference cache enabled. 
+// As requests are handled by each allocated node, their responses may be cached +// on that individual node. +// Calling this API clears the caches without restarting the deployment. package cleartrainedmodeldeploymentcache import ( @@ -27,9 +32,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +81,12 @@ func NewClearTrainedModelDeploymentCacheFunc(tp elastictransport.Interface) NewC } } -// Clear the cached results from a trained model deployment +// Clears a trained model deployment cache on all nodes where the trained model +// is assigned. +// A trained model deployment may have an inference cache enabled. +// As requests are handled by each allocated node, their responses may be cached +// on that individual node. +// Calling this API clears the caches without restarting the deployment. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/clear-trained-model-deployment-cache.html func New(tp elastictransport.Interface) *ClearTrainedModelDeploymentCache { @@ -274,7 +284,7 @@ func (r ClearTrainedModelDeploymentCache) IsSuccess(providedCtx context.Context) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -310,3 +320,47 @@ func (r *ClearTrainedModelDeploymentCache) _modelid(modelid string) *ClearTraine return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ClearTrainedModelDeploymentCache) ErrorTrace(errortrace bool) *ClearTrainedModelDeploymentCache { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ClearTrainedModelDeploymentCache) FilterPath(filterpaths ...string) *ClearTrainedModelDeploymentCache { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ClearTrainedModelDeploymentCache) Human(human bool) *ClearTrainedModelDeploymentCache { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ClearTrainedModelDeploymentCache) Pretty(pretty bool) *ClearTrainedModelDeploymentCache { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/cleartrainedmodeldeploymentcache/response.go b/typedapi/ml/cleartrainedmodeldeploymentcache/response.go index 1b0c12bbaa..1ca5ed0e5b 100644 --- a/typedapi/ml/cleartrainedmodeldeploymentcache/response.go +++ b/typedapi/ml/cleartrainedmodeldeploymentcache/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
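A sketch of clearing the per-node inference caches described above, assuming the model ID is supplied through the constructor returned by NewClearTrainedModelDeploymentCacheFunc (the generated convention for required path parameters, not shown in this hunk) and the usual Do(ctx):

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/cleartrainedmodeldeploymentcache"
)

// clearCache drops the inference caches of a deployed trained model on all
// assigned nodes without restarting the deployment.
func clearCache(ctx context.Context, tp elastictransport.Interface, modelID string) error {
	newClear := cleartrainedmodeldeploymentcache.NewClearTrainedModelDeploymentCacheFunc(tp)
	res, err := newClear(modelID).Do(ctx) // modelID argument is assumed here
	if err != nil {
		return err
	}
	fmt.Println("cache cleared:", res.Cleared)
	return nil
}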
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package cleartrainedmodeldeploymentcache // Response holds the response body struct for the package cleartrainedmodeldeploymentcache // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/clear_trained_model_deployment_cache/MlClearTrainedModelDeploymentCacheResponse.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/clear_trained_model_deployment_cache/MlClearTrainedModelDeploymentCacheResponse.ts#L20-L24 type Response struct { Cleared bool `json:"cleared"` } diff --git a/typedapi/ml/closejob/close_job.go b/typedapi/ml/closejob/close_job.go index 6723c95412..351bc4ed7a 100644 --- a/typedapi/ml/closejob/close_job.go +++ b/typedapi/ml/closejob/close_job.go @@ -16,10 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Closes one or more anomaly detection jobs. A job can be opened and closed -// multiple times throughout its lifecycle. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Close anomaly detection jobs +// A job can be opened and closed multiple times throughout its lifecycle. A +// closed job cannot receive data or perform analysis operations, but you can +// still explore and navigate results. +// When you close a job, it runs housekeeping tasks such as pruning the model +// history, flushing buffers, calculating final results and persisting the model +// snapshots. Depending upon the size of the job, it could take several minutes +// to close and the equivalent time to re-open. After it is closed, the job has +// a minimal overhead on the cluster except for maintaining its meta data. +// Therefore it is a best practice to close jobs that are no longer required to +// process data. +// If you close an anomaly detection job whose datafeed is running, the request +// first tries to stop the datafeed. This behavior is equivalent to calling stop +// datafeed API with the same timeout and force parameters as the close job +// request. +// When a datafeed that has a specified end date stops, it automatically closes +// its associated job. package closejob import ( @@ -31,6 +46,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -81,8 +97,23 @@ func NewCloseJobFunc(tp elastictransport.Interface) NewCloseJob { } } -// Closes one or more anomaly detection jobs. A job can be opened and closed -// multiple times throughout its lifecycle. +// Close anomaly detection jobs +// A job can be opened and closed multiple times throughout its lifecycle. A +// closed job cannot receive data or perform analysis operations, but you can +// still explore and navigate results. +// When you close a job, it runs housekeeping tasks such as pruning the model +// history, flushing buffers, calculating final results and persisting the model +// snapshots. Depending upon the size of the job, it could take several minutes +// to close and the equivalent time to re-open. 
After it is closed, the job has +// a minimal overhead on the cluster except for maintaining its meta data. +// Therefore it is a best practice to close jobs that are no longer required to +// process data. +// If you close an anomaly detection job whose datafeed is running, the request +// first tries to stop the datafeed. This behavior is equivalent to calling stop +// datafeed API with the same timeout and force parameters as the close job +// request. +// When a datafeed that has a specified end date stops, it automatically closes +// its associated job. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-close-job.html func New(tp elastictransport.Interface) *CloseJob { @@ -322,6 +353,50 @@ func (r *CloseJob) _jobid(jobid string) *CloseJob { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *CloseJob) ErrorTrace(errortrace bool) *CloseJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *CloseJob) FilterPath(filterpaths ...string) *CloseJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *CloseJob) Human(human bool) *CloseJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *CloseJob) Pretty(pretty bool) *CloseJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AllowNoMatch Refer to the description for the `allow_no_match` query parameter. // API name: allow_no_match func (r *CloseJob) AllowNoMatch(allownomatch bool) *CloseJob { diff --git a/typedapi/ml/closejob/request.go b/typedapi/ml/closejob/request.go index 3b3d71cd52..0e2aa5193e 100644 --- a/typedapi/ml/closejob/request.go +++ b/typedapi/ml/closejob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package closejob @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package closejob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/close_job/MlCloseJobRequest.ts#L24-L77 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/close_job/MlCloseJobRequest.ts#L24-L77 type Request struct { // AllowNoMatch Refer to the description for the `allow_no_match` query parameter. 
@@ -47,6 +47,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -77,7 +78,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "allow_no_match": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +92,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "force": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/closejob/response.go b/typedapi/ml/closejob/response.go index 21544f4f01..a440865a46 100644 --- a/typedapi/ml/closejob/response.go +++ b/typedapi/ml/closejob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package closejob // Response holds the response body struct for the package closejob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/close_job/MlCloseJobResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/close_job/MlCloseJobResponse.ts#L20-L22 type Response struct { Closed bool `json:"closed"` } diff --git a/typedapi/ml/deletecalendar/delete_calendar.go b/typedapi/ml/deletecalendar/delete_calendar.go index cc1964cdce..0273f11adb 100644 --- a/typedapi/ml/deletecalendar/delete_calendar.go +++ b/typedapi/ml/deletecalendar/delete_calendar.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes a calendar. +// Removes all scheduled events from a calendar, then deletes it. package deletecalendar import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +76,7 @@ func NewDeleteCalendarFunc(tp elastictransport.Interface) NewDeleteCalendar { } } -// Deletes a calendar. +// Removes all scheduled events from a calendar, then deletes it. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar.html func New(tp elastictransport.Interface) *DeleteCalendar { @@ -262,7 +262,7 @@ func (r DeleteCalendar) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +298,47 @@ func (r *DeleteCalendar) _calendarid(calendarid string) *DeleteCalendar { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteCalendar) ErrorTrace(errortrace bool) *DeleteCalendar { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
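To tie the expanded close-job description to the builder surface in this diff, a short sketch; the job ID is assumed to be passed through the function returned by NewCloseJobFunc (again, the generated convention for required path parameters), and Do(ctx) is assumed as elsewhere:

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/closejob"
)

// closeJob closes one or more anomaly detection jobs; wildcard expressions
// that match nothing are tolerated because allow_no_match is enabled.
func closeJob(ctx context.Context, tp elastictransport.Interface, jobID string) error {
	newClose := closejob.NewCloseJobFunc(tp)
	res, err := newClose(jobID). // jobID argument is assumed here
		AllowNoMatch(true).  // do not fail on an empty match
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("closed:", res.Closed)
	return nil
}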
+// API name: filter_path +func (r *DeleteCalendar) FilterPath(filterpaths ...string) *DeleteCalendar { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteCalendar) Human(human bool) *DeleteCalendar { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteCalendar) Pretty(pretty bool) *DeleteCalendar { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/deletecalendar/response.go b/typedapi/ml/deletecalendar/response.go index d59334e124..9d1227642e 100644 --- a/typedapi/ml/deletecalendar/response.go +++ b/typedapi/ml/deletecalendar/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletecalendar // Response holds the response body struct for the package deletecalendar // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_calendar/MlDeleteCalendarResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_calendar/MlDeleteCalendarResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deletecalendarevent/delete_calendar_event.go b/typedapi/ml/deletecalendarevent/delete_calendar_event.go index 92d40046ce..b909077d3d 100644 --- a/typedapi/ml/deletecalendarevent/delete_calendar_event.go +++ b/typedapi/ml/deletecalendarevent/delete_calendar_event.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes scheduled events from a calendar. package deletecalendarevent @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -275,7 +275,7 @@ func (r DeleteCalendarEvent) IsSuccess(providedCtx context.Context) (bool, error if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -321,3 +321,47 @@ func (r *DeleteCalendarEvent) _eventid(eventid string) *DeleteCalendarEvent { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *DeleteCalendarEvent) ErrorTrace(errortrace bool) *DeleteCalendarEvent { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteCalendarEvent) FilterPath(filterpaths ...string) *DeleteCalendarEvent { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteCalendarEvent) Human(human bool) *DeleteCalendarEvent { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteCalendarEvent) Pretty(pretty bool) *DeleteCalendarEvent { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/deletecalendarevent/response.go b/typedapi/ml/deletecalendarevent/response.go index 4e1cd4a2f0..1b6839590a 100644 --- a/typedapi/ml/deletecalendarevent/response.go +++ b/typedapi/ml/deletecalendarevent/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletecalendarevent // Response holds the response body struct for the package deletecalendarevent // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_calendar_event/MlDeleteCalendarEventResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_calendar_event/MlDeleteCalendarEventResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deletecalendarjob/delete_calendar_job.go b/typedapi/ml/deletecalendarjob/delete_calendar_job.go index d2066d87be..961d48f68a 100644 --- a/typedapi/ml/deletecalendarjob/delete_calendar_job.go +++ b/typedapi/ml/deletecalendarjob/delete_calendar_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes anomaly detection jobs from a calendar. 
package deletecalendarjob @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -275,7 +275,7 @@ func (r DeleteCalendarJob) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -322,3 +322,47 @@ func (r *DeleteCalendarJob) _jobid(jobid string) *DeleteCalendarJob { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteCalendarJob) ErrorTrace(errortrace bool) *DeleteCalendarJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteCalendarJob) FilterPath(filterpaths ...string) *DeleteCalendarJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteCalendarJob) Human(human bool) *DeleteCalendarJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteCalendarJob) Pretty(pretty bool) *DeleteCalendarJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/deletecalendarjob/response.go b/typedapi/ml/deletecalendarjob/response.go index 3cd1df611f..3a7a861d58 100644 --- a/typedapi/ml/deletecalendarjob/response.go +++ b/typedapi/ml/deletecalendarjob/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletecalendarjob @@ -31,7 +31,7 @@ import ( // Response holds the response body struct for the package deletecalendarjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_calendar_job/MlDeleteCalendarJobResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_calendar_job/MlDeleteCalendarJobResponse.ts#L22-L31 type Response struct { // CalendarId A string that uniquely identifies a calendar. diff --git a/typedapi/ml/deletedatafeed/delete_datafeed.go b/typedapi/ml/deletedatafeed/delete_datafeed.go index 7c0118d315..936a1ff52f 100644 --- a/typedapi/ml/deletedatafeed/delete_datafeed.go +++ b/typedapi/ml/deletedatafeed/delete_datafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes an existing datafeed. package deletedatafeed @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -263,7 +262,7 @@ func (r DeleteDatafeed) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -311,3 +310,47 @@ func (r *DeleteDatafeed) Force(force bool) *DeleteDatafeed { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteDatafeed) ErrorTrace(errortrace bool) *DeleteDatafeed { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteDatafeed) FilterPath(filterpaths ...string) *DeleteDatafeed { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteDatafeed) Human(human bool) *DeleteDatafeed { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteDatafeed) Pretty(pretty bool) *DeleteDatafeed { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/deletedatafeed/response.go b/typedapi/ml/deletedatafeed/response.go index 4f5094f0a5..f919ae8215 100644 --- a/typedapi/ml/deletedatafeed/response.go +++ b/typedapi/ml/deletedatafeed/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletedatafeed // Response holds the response body struct for the package deletedatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_datafeed/MlDeleteDatafeedResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_datafeed/MlDeleteDatafeedResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/ml/deletedataframeanalytics/delete_data_frame_analytics.go b/typedapi/ml/deletedataframeanalytics/delete_data_frame_analytics.go index 34a8190283..b9fc754061 100644 --- a/typedapi/ml/deletedataframeanalytics/delete_data_frame_analytics.go +++ b/typedapi/ml/deletedataframeanalytics/delete_data_frame_analytics.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes an existing data frame analytics job. +// Deletes a data frame analytics job. package deletedataframeanalytics import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -77,7 +76,7 @@ func NewDeleteDataFrameAnalyticsFunc(tp elastictransport.Interface) NewDeleteDat } } -// Deletes an existing data frame analytics job. +// Deletes a data frame analytics job. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-dfanalytics.html func New(tp elastictransport.Interface) *DeleteDataFrameAnalytics { @@ -265,7 +264,7 @@ func (r DeleteDataFrameAnalytics) IsSuccess(providedCtx context.Context) (bool, if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -318,3 +317,47 @@ func (r *DeleteDataFrameAnalytics) Timeout(duration string) *DeleteDataFrameAnal return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteDataFrameAnalytics) ErrorTrace(errortrace bool) *DeleteDataFrameAnalytics { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteDataFrameAnalytics) FilterPath(filterpaths ...string) *DeleteDataFrameAnalytics { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteDataFrameAnalytics) Human(human bool) *DeleteDataFrameAnalytics { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteDataFrameAnalytics) Pretty(pretty bool) *DeleteDataFrameAnalytics { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/deletedataframeanalytics/response.go b/typedapi/ml/deletedataframeanalytics/response.go index 4d57debf0d..059c081391 100644 --- a/typedapi/ml/deletedataframeanalytics/response.go +++ b/typedapi/ml/deletedataframeanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletedataframeanalytics // Response holds the response body struct for the package deletedataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_data_frame_analytics/MlDeleteDataFrameAnalyticsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_data_frame_analytics/MlDeleteDataFrameAnalyticsResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deleteexpireddata/delete_expired_data.go b/typedapi/ml/deleteexpireddata/delete_expired_data.go index 576ad5fcb9..6619bf9645 100644 --- a/typedapi/ml/deleteexpireddata/delete_expired_data.go +++ b/typedapi/ml/deleteexpireddata/delete_expired_data.go @@ -16,9 +16,17 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes expired and unused machine learning data. +// Deletes all job results, model snapshots and forecast data that have exceeded +// their retention days period. Machine learning state documents that are not +// associated with any job are also deleted. +// You can limit the request to a single or set of anomaly detection jobs by +// using a job identifier, a group name, a comma-separated list of jobs, or a +// wildcard expression. You can delete expired data for all anomaly detection +// jobs by using _all, by specifying * as the , or by omitting the +// . package deleteexpireddata import ( @@ -30,6 +38,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -79,6 +88,14 @@ func NewDeleteExpiredDataFunc(tp elastictransport.Interface) NewDeleteExpiredDat } // Deletes expired and unused machine learning data. +// Deletes all job results, model snapshots and forecast data that have exceeded +// their retention days period. Machine learning state documents that are not +// associated with any job are also deleted. +// You can limit the request to a single or set of anomaly detection jobs by +// using a job identifier, a group name, a comma-separated list of jobs, or a +// wildcard expression. You can delete expired data for all anomaly detection +// jobs by using _all, by specifying * as the , or by omitting the +// . // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-expired-data.html func New(tp elastictransport.Interface) *DeleteExpiredData { @@ -320,6 +337,50 @@ func (r *DeleteExpiredData) JobId(jobid string) *DeleteExpiredData { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteExpiredData) ErrorTrace(errortrace bool) *DeleteExpiredData { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
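Putting the expanded delete-expired-data description together with the JobId, FilterPath, and Human helpers added here, a call might look like the sketch below. This is a hedged sketch rather than part of the diff: the elastictransport setup, the Do executor, and the job group name are assumptions following the usual generated-client pattern.

package main

import (
	"context"
	"fmt"
	"log"
	"net/url"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/deleteexpireddata"
)

func main() {
	// Assumed: a plain transport pointed at a local cluster.
	u, err := url.Parse("http://localhost:9200")
	if err != nil {
		log.Fatal(err)
	}
	tp, err := elastictransport.New(elastictransport.Config{URLs: []*url.URL{u}})
	if err != nil {
		log.Fatal(err)
	}

	res, err := deleteexpireddata.New(tp).
		JobId("my-job-group").   // limit clean-up to one job, group, or wildcard
		FilterPath("deleted").   // trim the response to the "deleted" flag
		Human(true).             // human-readable values where applicable
		Do(context.Background()) // assumed standard generated executor
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("deleted:", res.Deleted)
}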
+// API name: filter_path +func (r *DeleteExpiredData) FilterPath(filterpaths ...string) *DeleteExpiredData { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteExpiredData) Human(human bool) *DeleteExpiredData { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteExpiredData) Pretty(pretty bool) *DeleteExpiredData { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // RequestsPerSecond The desired requests per second for the deletion processes. The default // behavior is no throttling. // API name: requests_per_second diff --git a/typedapi/ml/deleteexpireddata/request.go b/typedapi/ml/deleteexpireddata/request.go index ad727ffbf7..c097343e9d 100644 --- a/typedapi/ml/deleteexpireddata/request.go +++ b/typedapi/ml/deleteexpireddata/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleteexpireddata @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package deleteexpireddata // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_expired_data/MlDeleteExpiredDataRequest.ts#L25-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_expired_data/MlDeleteExpiredDataRequest.ts#L25-L72 type Request struct { // RequestsPerSecond The desired requests per second for the deletion processes. The default @@ -46,6 +46,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -76,7 +77,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "requests_per_second": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/deleteexpireddata/response.go b/typedapi/ml/deleteexpireddata/response.go index 8b277d7788..ff0c4c95ac 100644 --- a/typedapi/ml/deleteexpireddata/response.go +++ b/typedapi/ml/deleteexpireddata/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleteexpireddata // Response holds the response body struct for the package deleteexpireddata // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_expired_data/MlDeleteExpiredDataResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_expired_data/MlDeleteExpiredDataResponse.ts#L20-L22 type Response struct { Deleted bool `json:"deleted"` } diff --git a/typedapi/ml/deletefilter/delete_filter.go b/typedapi/ml/deletefilter/delete_filter.go index e113bb7b7a..d3defe5b30 100644 --- a/typedapi/ml/deletefilter/delete_filter.go +++ b/typedapi/ml/deletefilter/delete_filter.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes a filter. +// If an anomaly detection job references the filter, you cannot delete the +// filter. You must update or delete the job before you can delete the filter. package deletefilter import ( @@ -27,9 +29,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,6 +79,8 @@ func NewDeleteFilterFunc(tp elastictransport.Interface) NewDeleteFilter { } // Deletes a filter. +// If an anomaly detection job references the filter, you cannot delete the +// filter. You must update or delete the job before you can delete the filter. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-filter.html func New(tp elastictransport.Interface) *DeleteFilter { @@ -262,7 +266,7 @@ func (r DeleteFilter) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +302,47 @@ func (r *DeleteFilter) _filterid(filterid string) *DeleteFilter { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteFilter) ErrorTrace(errortrace bool) *DeleteFilter { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteFilter) FilterPath(filterpaths ...string) *DeleteFilter { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *DeleteFilter) Human(human bool) *DeleteFilter { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteFilter) Pretty(pretty bool) *DeleteFilter { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/deletefilter/response.go b/typedapi/ml/deletefilter/response.go index 918c9b48f6..314ae4ad7d 100644 --- a/typedapi/ml/deletefilter/response.go +++ b/typedapi/ml/deletefilter/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletefilter // Response holds the response body struct for the package deletefilter // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_filter/MlDeleteFilterResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_filter/MlDeleteFilterResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deleteforecast/delete_forecast.go b/typedapi/ml/deleteforecast/delete_forecast.go index 2e7c7df1de..4415dea3bd 100644 --- a/typedapi/ml/deleteforecast/delete_forecast.go +++ b/typedapi/ml/deleteforecast/delete_forecast.go @@ -16,9 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes forecasts from a machine learning job. +// By default, forecasts are retained for 14 days. You can specify a +// different retention period with the `expires_in` parameter in the forecast +// jobs API. The delete forecast API enables you to delete one or more +// forecasts before they expire. package deleteforecast import ( @@ -27,7 +31,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -81,6 +84,10 @@ func NewDeleteForecastFunc(tp elastictransport.Interface) NewDeleteForecast { } // Deletes forecasts from a machine learning job. +// By default, forecasts are retained for 14 days. You can specify a +// different retention period with the `expires_in` parameter in the forecast +// jobs API. The delete forecast API enables you to delete one or more +// forecasts before they expire. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-forecast.html func New(tp elastictransport.Interface) *DeleteForecast { @@ -289,7 +296,7 @@ func (r DeleteForecast) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -357,3 +364,47 @@ func (r *DeleteForecast) Timeout(duration string) *DeleteForecast { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *DeleteForecast) ErrorTrace(errortrace bool) *DeleteForecast { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteForecast) FilterPath(filterpaths ...string) *DeleteForecast { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteForecast) Human(human bool) *DeleteForecast { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteForecast) Pretty(pretty bool) *DeleteForecast { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/deleteforecast/response.go b/typedapi/ml/deleteforecast/response.go index b1a8230644..4f5b252c19 100644 --- a/typedapi/ml/deleteforecast/response.go +++ b/typedapi/ml/deleteforecast/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleteforecast // Response holds the response body struct for the package deleteforecast // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_forecast/MlDeleteForecastResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_forecast/MlDeleteForecastResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deletejob/delete_job.go b/typedapi/ml/deletejob/delete_job.go index 9e10acf319..25fd8622a9 100644 --- a/typedapi/ml/deletejob/delete_job.go +++ b/typedapi/ml/deletejob/delete_job.go @@ -16,9 +16,16 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes an existing anomaly detection job. +// Deletes an anomaly detection job. +// +// All job configuration, model state and results are deleted. +// It is not currently possible to delete multiple jobs using wildcards or a +// comma separated list. If you delete a job that has a datafeed, the request +// first tries to delete the datafeed. This behavior is equivalent to calling +// the delete datafeed API with the same timeout and force parameters as the +// delete job request. 
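Given the clarified delete-job semantics (the datafeed is removed first, using the same timeout and force settings), a deletion that waits for completion could look like the following hedged sketch. The constructor returned by NewDeleteJobFunc is assumed to take the job ID, and the transport and job name are placeholders.

package main

import (
	"context"
	"fmt"
	"log"
	"net/url"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/deletejob"
)

func main() {
	u, _ := url.Parse("http://localhost:9200") // placeholder cluster address
	tp, err := elastictransport.New(elastictransport.Config{URLs: []*url.URL{u}})
	if err != nil {
		log.Fatal(err)
	}

	// Assumed: NewDeleteJobFunc returns a constructor taking the job ID.
	ok, err := deletejob.NewDeleteJobFunc(tp)("my-old-job").
		WaitForCompletion(true). // block until the job (and its datafeed) are gone
		IsSuccess(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("deleted:", ok)
}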
package deletejob import ( @@ -27,7 +34,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -77,7 +83,14 @@ func NewDeleteJobFunc(tp elastictransport.Interface) NewDeleteJob { } } -// Deletes an existing anomaly detection job. +// Deletes an anomaly detection job. +// +// All job configuration, model state and results are deleted. +// It is not currently possible to delete multiple jobs using wildcards or a +// comma separated list. If you delete a job that has a datafeed, the request +// first tries to delete the datafeed. This behavior is equivalent to calling +// the delete datafeed API with the same timeout and force parameters as the +// delete job request. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-job.html func New(tp elastictransport.Interface) *DeleteJob { @@ -263,7 +276,7 @@ func (r DeleteJob) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -328,3 +341,47 @@ func (r *DeleteJob) WaitForCompletion(waitforcompletion bool) *DeleteJob { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteJob) ErrorTrace(errortrace bool) *DeleteJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteJob) FilterPath(filterpaths ...string) *DeleteJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteJob) Human(human bool) *DeleteJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteJob) Pretty(pretty bool) *DeleteJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/deletejob/response.go b/typedapi/ml/deletejob/response.go index ee088a6db0..e40f6f5d8d 100644 --- a/typedapi/ml/deletejob/response.go +++ b/typedapi/ml/deletejob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletejob // Response holds the response body struct for the package deletejob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_job/MlDeleteJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_job/MlDeleteJobResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deletemodelsnapshot/delete_model_snapshot.go b/typedapi/ml/deletemodelsnapshot/delete_model_snapshot.go index 87183749af..701dc8a27b 100644 --- a/typedapi/ml/deletemodelsnapshot/delete_model_snapshot.go +++ b/typedapi/ml/deletemodelsnapshot/delete_model_snapshot.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes an existing model snapshot. +// You cannot delete the active model snapshot. To delete that snapshot, first +// revert to a different one. To identify the active model snapshot, refer to +// the `model_snapshot_id` in the results from the get jobs API. package deletemodelsnapshot import ( @@ -27,9 +30,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -82,6 +85,9 @@ func NewDeleteModelSnapshotFunc(tp elastictransport.Interface) NewDeleteModelSna } // Deletes an existing model snapshot. +// You cannot delete the active model snapshot. To delete that snapshot, first +// revert to a different one. To identify the active model snapshot, refer to +// the `model_snapshot_id` in the results from the get jobs API. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-snapshot.html func New(tp elastictransport.Interface) *DeleteModelSnapshot { @@ -275,7 +281,7 @@ func (r DeleteModelSnapshot) IsSuccess(providedCtx context.Context) (bool, error if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -320,3 +326,47 @@ func (r *DeleteModelSnapshot) _snapshotid(snapshotid string) *DeleteModelSnapsho return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteModelSnapshot) ErrorTrace(errortrace bool) *DeleteModelSnapshot { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteModelSnapshot) FilterPath(filterpaths ...string) *DeleteModelSnapshot { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteModelSnapshot) Human(human bool) *DeleteModelSnapshot { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteModelSnapshot) Pretty(pretty bool) *DeleteModelSnapshot { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/deletemodelsnapshot/response.go b/typedapi/ml/deletemodelsnapshot/response.go index 414e8b7b86..8933760702 100644 --- a/typedapi/ml/deletemodelsnapshot/response.go +++ b/typedapi/ml/deletemodelsnapshot/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletemodelsnapshot // Response holds the response body struct for the package deletemodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_model_snapshot/MlDeleteModelSnapshotResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_model_snapshot/MlDeleteModelSnapshotResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deletetrainedmodel/delete_trained_model.go b/typedapi/ml/deletetrainedmodel/delete_trained_model.go index dc4c421807..8cc0678107 100644 --- a/typedapi/ml/deletetrainedmodel/delete_trained_model.go +++ b/typedapi/ml/deletetrainedmodel/delete_trained_model.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes an existing trained inference model that is currently not referenced // by an ingest pipeline. @@ -28,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -265,7 +264,7 @@ func (r DeleteTrainedModel) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -310,3 +309,47 @@ func (r *DeleteTrainedModel) Force(force bool) *DeleteTrainedModel { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteTrainedModel) ErrorTrace(errortrace bool) *DeleteTrainedModel { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *DeleteTrainedModel) FilterPath(filterpaths ...string) *DeleteTrainedModel { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteTrainedModel) Human(human bool) *DeleteTrainedModel { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteTrainedModel) Pretty(pretty bool) *DeleteTrainedModel { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/deletetrainedmodel/response.go b/typedapi/ml/deletetrainedmodel/response.go index e46c2c45bb..85ca7e33fb 100644 --- a/typedapi/ml/deletetrainedmodel/response.go +++ b/typedapi/ml/deletetrainedmodel/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletetrainedmodel // Response holds the response body struct for the package deletetrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_trained_model/MlDeleteTrainedModelResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_trained_model/MlDeleteTrainedModelResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/deletetrainedmodelalias/delete_trained_model_alias.go b/typedapi/ml/deletetrainedmodelalias/delete_trained_model_alias.go index f70d19d9ba..eeb0c588b6 100644 --- a/typedapi/ml/deletetrainedmodelalias/delete_trained_model_alias.go +++ b/typedapi/ml/deletetrainedmodelalias/delete_trained_model_alias.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes a model alias that refers to the trained model +// Deletes a trained model alias. +// This API deletes an existing model alias that refers to a trained model. If +// the model alias is missing or refers to a model other than the one identified +// by the `model_id`, this API returns an error. package deletetrainedmodelalias import ( @@ -27,9 +30,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -81,7 +84,10 @@ func NewDeleteTrainedModelAliasFunc(tp elastictransport.Interface) NewDeleteTrai } } -// Deletes a model alias that refers to the trained model +// Deletes a trained model alias. 
+// This API deletes an existing model alias that refers to a trained model. If +// the model alias is missing or refers to a model other than the one identified +// by the `model_id`, this API returns an error. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-trained-models-aliases.html func New(tp elastictransport.Interface) *DeleteTrainedModelAlias { @@ -281,7 +287,7 @@ func (r DeleteTrainedModelAlias) IsSuccess(providedCtx context.Context) (bool, e if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -326,3 +332,47 @@ func (r *DeleteTrainedModelAlias) _modelid(modelid string) *DeleteTrainedModelAl return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteTrainedModelAlias) ErrorTrace(errortrace bool) *DeleteTrainedModelAlias { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteTrainedModelAlias) FilterPath(filterpaths ...string) *DeleteTrainedModelAlias { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteTrainedModelAlias) Human(human bool) *DeleteTrainedModelAlias { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteTrainedModelAlias) Pretty(pretty bool) *DeleteTrainedModelAlias { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/deletetrainedmodelalias/response.go b/typedapi/ml/deletetrainedmodelalias/response.go index b9b26a0ac2..fc6921ff7e 100644 --- a/typedapi/ml/deletetrainedmodelalias/response.go +++ b/typedapi/ml/deletetrainedmodelalias/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletetrainedmodelalias // Response holds the response body struct for the package deletetrainedmodelalias // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/delete_trained_model_alias/MlDeleteTrainedModelAliasResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/delete_trained_model_alias/MlDeleteTrainedModelAliasResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/ml/estimatemodelmemory/estimate_model_memory.go b/typedapi/ml/estimatemodelmemory/estimate_model_memory.go index ecf8075eee..9f75c49d39 100644 --- a/typedapi/ml/estimatemodelmemory/estimate_model_memory.go +++ b/typedapi/ml/estimatemodelmemory/estimate_model_memory.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Estimates the model memory +// Makes an estimation of the memory usage for an anomaly detection job model. +// It is based on analysis configuration details for the job and cardinality +// estimates for the fields it references. package estimatemodelmemory import ( @@ -30,6 +32,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -72,7 +75,9 @@ func NewEstimateModelMemoryFunc(tp elastictransport.Interface) NewEstimateModelM } } -// Estimates the model memory +// Makes an estimation of the memory usage for an anomaly detection job model. +// It is based on analysis configuration details for the job and cardinality +// estimates for the fields it references. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-apis.html func New(tp elastictransport.Interface) *EstimateModelMemory { @@ -293,6 +298,50 @@ func (r *EstimateModelMemory) Header(key, value string) *EstimateModelMemory { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *EstimateModelMemory) ErrorTrace(errortrace bool) *EstimateModelMemory { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *EstimateModelMemory) FilterPath(filterpaths ...string) *EstimateModelMemory { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *EstimateModelMemory) Human(human bool) *EstimateModelMemory { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *EstimateModelMemory) Pretty(pretty bool) *EstimateModelMemory { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AnalysisConfig For a list of the properties that you can specify in the // `analysis_config` component of the body of this API. // API name: analysis_config diff --git a/typedapi/ml/estimatemodelmemory/request.go b/typedapi/ml/estimatemodelmemory/request.go index 6dc9a02476..280d8bf160 100644 --- a/typedapi/ml/estimatemodelmemory/request.go +++ b/typedapi/ml/estimatemodelmemory/request.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package estimatemodelmemory @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package estimatemodelmemory // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/estimate_model_memory/MlEstimateModelMemoryRequest.ts#L26-L61 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/estimate_model_memory/MlEstimateModelMemoryRequest.ts#L26-L61 type Request struct { // AnalysisConfig For a list of the properties that you can specify in the @@ -57,6 +57,7 @@ func NewRequest() *Request { MaxBucketCardinality: make(map[string]int64, 0), OverallCardinality: make(map[string]int64, 0), } + return r } diff --git a/typedapi/ml/estimatemodelmemory/response.go b/typedapi/ml/estimatemodelmemory/response.go index 8502186865..e01e194add 100644 --- a/typedapi/ml/estimatemodelmemory/response.go +++ b/typedapi/ml/estimatemodelmemory/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package estimatemodelmemory // Response holds the response body struct for the package estimatemodelmemory // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/estimate_model_memory/MlEstimateModelMemoryResponse.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/estimate_model_memory/MlEstimateModelMemoryResponse.ts#L20-L24 type Response struct { ModelMemoryEstimate string `json:"model_memory_estimate"` } diff --git a/typedapi/ml/evaluatedataframe/evaluate_data_frame.go b/typedapi/ml/evaluatedataframe/evaluate_data_frame.go index 6748dcaffc..156b4c8fd2 100644 --- a/typedapi/ml/evaluatedataframe/evaluate_data_frame.go +++ b/typedapi/ml/evaluatedataframe/evaluate_data_frame.go @@ -16,9 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Evaluates the data frame analytics for an annotated index. +// The API packages together commonly used evaluation metrics for various types +// of machine learning features. This has been designed for use on indexes +// created by data frame analytics. Evaluation requires both a ground truth +// field and an analytics result field to be present. package evaluatedataframe import ( @@ -30,6 +34,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -73,6 +78,10 @@ func NewEvaluateDataFrameFunc(tp elastictransport.Interface) NewEvaluateDataFram } // Evaluates the data frame analytics for an annotated index. +// The API packages together commonly used evaluation metrics for various types +// of machine learning features. 
This has been designed for use on indexes +// created by data frame analytics. Evaluation requires both a ground truth +// field and an analytics result field to be present. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/evaluate-dfanalytics.html func New(tp elastictransport.Interface) *EvaluateDataFrame { @@ -293,6 +302,50 @@ func (r *EvaluateDataFrame) Header(key, value string) *EvaluateDataFrame { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *EvaluateDataFrame) ErrorTrace(errortrace bool) *EvaluateDataFrame { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *EvaluateDataFrame) FilterPath(filterpaths ...string) *EvaluateDataFrame { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *EvaluateDataFrame) Human(human bool) *EvaluateDataFrame { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *EvaluateDataFrame) Pretty(pretty bool) *EvaluateDataFrame { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Evaluation Defines the type of evaluation you want to perform. // API name: evaluation func (r *EvaluateDataFrame) Evaluation(evaluation *types.DataframeEvaluationContainer) *EvaluateDataFrame { diff --git a/typedapi/ml/evaluatedataframe/request.go b/typedapi/ml/evaluatedataframe/request.go index e1437676a5..15657753ae 100644 --- a/typedapi/ml/evaluatedataframe/request.go +++ b/typedapi/ml/evaluatedataframe/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package evaluatedataframe @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package evaluatedataframe // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/MlEvaluateDataFrameRequest.ts#L25-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/MlEvaluateDataFrameRequest.ts#L25-L52 type Request struct { // Evaluation Defines the type of evaluation you want to perform. 
@@ -46,6 +46,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/evaluatedataframe/response.go b/typedapi/ml/evaluatedataframe/response.go index c51d4bd821..7875e5098b 100644 --- a/typedapi/ml/evaluatedataframe/response.go +++ b/typedapi/ml/evaluatedataframe/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package evaluatedataframe @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package evaluatedataframe // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/MlEvaluateDataFrameResponse.ts#L26-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/MlEvaluateDataFrameResponse.ts#L26-L33 type Response struct { Classification *types.DataframeClassificationSummary `json:"classification,omitempty"` OutlierDetection *types.DataframeOutlierDetectionSummary `json:"outlier_detection,omitempty"` diff --git a/typedapi/ml/explaindataframeanalytics/explain_data_frame_analytics.go b/typedapi/ml/explaindataframeanalytics/explain_data_frame_analytics.go index 5717e52a43..84ad40a64d 100644 --- a/typedapi/ml/explaindataframeanalytics/explain_data_frame_analytics.go +++ b/typedapi/ml/explaindataframeanalytics/explain_data_frame_analytics.go @@ -16,9 +16,17 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Explains a data frame analytics config. +// This API provides explanations for a data frame analytics config that either +// exists already or one that has not been created yet. The following +// explanations are provided: +// * which fields are included or not in the analysis and why, +// * how much memory is estimated to be required. The estimate can be used when +// deciding the appropriate value for model_memory_limit setting later on. +// If you have object fields or fields that are excluded via source filtering, +// they are not included in the explanation. package explaindataframeanalytics import ( @@ -30,6 +38,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -79,6 +88,14 @@ func NewExplainDataFrameAnalyticsFunc(tp elastictransport.Interface) NewExplainD } // Explains a data frame analytics config. +// This API provides explanations for a data frame analytics config that either +// exists already or one that has not been created yet. The following +// explanations are provided: +// * which fields are included or not in the analysis and why, +// * how much memory is estimated to be required. The estimate can be used when +// deciding the appropriate value for model_memory_limit setting later on. +// If you have object fields or fields that are excluded via source filtering, +// they are not included in the explanation. 
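Since the explain API accepts either an existing configuration ID or a configuration that has not been created yet, the simplest call explains a stored config by ID. A hedged sketch along the same lines as the earlier ones (the Do executor, transport, and ID are assumptions or placeholders):

package main

import (
	"context"
	"fmt"
	"log"
	"net/url"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/explaindataframeanalytics"
)

func main() {
	u, _ := url.Parse("http://localhost:9200") // placeholder cluster address
	tp, err := elastictransport.New(elastictransport.Config{URLs: []*url.URL{u}})
	if err != nil {
		log.Fatal(err)
	}

	res, err := explaindataframeanalytics.New(tp).
		Id("my-dfa-config").           // explain an existing config by its ID
		FilterPath("field_selection"). // keep only the per-field explanations
		Do(context.Background())       // assumed standard generated executor
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("fields explained:", len(res.FieldSelection))
}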
// // http://www.elastic.co/guide/en/elasticsearch/reference/current/explain-dfanalytics.html func New(tp elastictransport.Interface) *ExplainDataFrameAnalytics { @@ -329,6 +346,50 @@ func (r *ExplainDataFrameAnalytics) Id(id string) *ExplainDataFrameAnalytics { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ExplainDataFrameAnalytics) ErrorTrace(errortrace bool) *ExplainDataFrameAnalytics { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ExplainDataFrameAnalytics) FilterPath(filterpaths ...string) *ExplainDataFrameAnalytics { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ExplainDataFrameAnalytics) Human(human bool) *ExplainDataFrameAnalytics { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ExplainDataFrameAnalytics) Pretty(pretty bool) *ExplainDataFrameAnalytics { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AllowLazyStart Specifies whether this job can start when there is insufficient machine // learning node capacity for it to be immediately assigned to a node. // API name: allow_lazy_start diff --git a/typedapi/ml/explaindataframeanalytics/request.go b/typedapi/ml/explaindataframeanalytics/request.go index 718b72ba68..8c60a6912a 100644 --- a/typedapi/ml/explaindataframeanalytics/request.go +++ b/typedapi/ml/explaindataframeanalytics/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package explaindataframeanalytics @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package explaindataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/explain_data_frame_analytics/MlExplainDataFrameAnalyticsRequest.ts#L30-L107 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/explain_data_frame_analytics/MlExplainDataFrameAnalyticsRequest.ts#L30-L107 type Request struct { // AllowLazyStart Specifies whether this job can start when there is insufficient machine @@ -69,6 +69,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/explaindataframeanalytics/response.go b/typedapi/ml/explaindataframeanalytics/response.go index bd7f3c4e66..8c44186b27 100644 --- a/typedapi/ml/explaindataframeanalytics/response.go +++ b/typedapi/ml/explaindataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package explaindataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package explaindataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/explain_data_frame_analytics/MlExplainDataFrameAnalyticsResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/explain_data_frame_analytics/MlExplainDataFrameAnalyticsResponse.ts#L25-L32 type Response struct { // FieldSelection An array of objects that explain selection for each field, sorted by the diff --git a/typedapi/ml/flushjob/flush_job.go b/typedapi/ml/flushjob/flush_job.go index ecfdd56805..f87401fd4c 100644 --- a/typedapi/ml/flushjob/flush_job.go +++ b/typedapi/ml/flushjob/flush_job.go @@ -16,9 +16,17 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Forces any buffered data to be processed by the job. +// The flush jobs API is only applicable when sending data for analysis using +// the post data API. Depending on the content of the buffer, then it might +// additionally calculate new results. Both flush and close operations are +// similar, however the flush is more efficient if you are expecting to send +// more data for analysis. When flushing, the job remains open and is available +// to continue analyzing data. A close operation additionally prunes and +// persists the model state to disk and the job must be opened again before +// analyzing further data. 
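A sketch of flushing an open job through the generated package, under the assumption that the NewFlushJobFunc constructor returns a function taking the required job id, as generated endpoints with path parameters normally do; the job id is a placeholder and the transport is built as in the earlier sketch.

package mlexamples

import (
	"context"
	"log"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/flushjob"
)

// flushOpenJob forces buffered data to be processed while keeping the job
// open, which is the cheaper choice when more data is still expected.
func flushOpenJob(ctx context.Context, tp elastictransport.Interface) error {
	res, err := flushjob.NewFlushJobFunc(tp)("my-anomaly-job"). // placeholder job id
		Pretty(true).
		Do(ctx)
	if err != nil {
		return err
	}
	log.Printf("flushed: %v", res.Flushed)
	return nil
}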
package flushjob import ( @@ -30,6 +38,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -81,6 +90,14 @@ func NewFlushJobFunc(tp elastictransport.Interface) NewFlushJob { } // Forces any buffered data to be processed by the job. +// The flush jobs API is only applicable when sending data for analysis using +// the post data API. Depending on the content of the buffer, then it might +// additionally calculate new results. Both flush and close operations are +// similar, however the flush is more efficient if you are expecting to send +// more data for analysis. When flushing, the job remains open and is available +// to continue analyzing data. A close operation additionally prunes and +// persists the model state to disk and the job must be opened again before +// analyzing further data. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-flush-job.html func New(tp elastictransport.Interface) *FlushJob { @@ -316,6 +333,50 @@ func (r *FlushJob) _jobid(jobid string) *FlushJob { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *FlushJob) ErrorTrace(errortrace bool) *FlushJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *FlushJob) FilterPath(filterpaths ...string) *FlushJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *FlushJob) Human(human bool) *FlushJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *FlushJob) Pretty(pretty bool) *FlushJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AdvanceTime Refer to the description for the `advance_time` query parameter. // API name: advance_time func (r *FlushJob) AdvanceTime(datetime types.DateTime) *FlushJob { diff --git a/typedapi/ml/flushjob/request.go b/typedapi/ml/flushjob/request.go index 6ea019fd38..1d04ad87b8 100644 --- a/typedapi/ml/flushjob/request.go +++ b/typedapi/ml/flushjob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package flushjob @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package flushjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/flush_job/MlFlushJobRequest.ts#L24-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/flush_job/MlFlushJobRequest.ts#L24-L99 type Request struct { // AdvanceTime Refer to the description for the `advance_time` query parameter. @@ -51,6 +51,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -86,7 +87,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "calc_interim": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/flushjob/response.go b/typedapi/ml/flushjob/response.go index a6562b8ea5..0d0639636e 100644 --- a/typedapi/ml/flushjob/response.go +++ b/typedapi/ml/flushjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package flushjob // Response holds the response body struct for the package flushjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/flush_job/MlFlushJobResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/flush_job/MlFlushJobResponse.ts#L22-L31 type Response struct { Flushed bool `json:"flushed"` // LastFinalizedBucketEnd Provides the timestamp (in milliseconds since the epoch) of the end of diff --git a/typedapi/ml/forecast/forecast.go b/typedapi/ml/forecast/forecast.go index b200584d3a..1c2ab0b2ee 100644 --- a/typedapi/ml/forecast/forecast.go +++ b/typedapi/ml/forecast/forecast.go @@ -16,10 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Predicts the future behavior of a time series by using its historical // behavior. +// +// Forecasts are not supported for jobs that perform population analysis; an +// error occurs if you try to create a forecast for a job that has an +// `over_field_name` in its configuration. package forecast import ( @@ -31,6 +35,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -84,6 +89,10 @@ func NewForecastFunc(tp elastictransport.Interface) NewForecast { // Predicts the future behavior of a time series by using its historical // behavior. // +// Forecasts are not supported for jobs that perform population analysis; an +// error occurs if you try to create a forecast for a job that has an +// `over_field_name` in its configuration. 
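A sketch of starting a forecast with the default duration for a non-population job: the job id is a placeholder, Do is assumed as the execution method, and optional body fields would be set through the generated setters such as Duration shown further down.

package mlexamples

import (
	"context"
	"log"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/forecast"
)

// startForecast starts a forecast for an open, non-population anomaly
// detection job and reports the forecast id used to fetch results later.
func startForecast(ctx context.Context, tp elastictransport.Interface) error {
	res, err := forecast.NewForecastFunc(tp)("my-anomaly-job"). // placeholder job id
		Do(ctx)
	if err != nil {
		return err
	}
	log.Printf("acknowledged=%v forecast_id=%s", res.Acknowledged, res.ForecastId)
	return nil
}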
+// // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-forecast.html func New(tp elastictransport.Interface) *Forecast { r := &Forecast{ @@ -319,6 +328,50 @@ func (r *Forecast) _jobid(jobid string) *Forecast { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Forecast) ErrorTrace(errortrace bool) *Forecast { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Forecast) FilterPath(filterpaths ...string) *Forecast { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Forecast) Human(human bool) *Forecast { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Forecast) Pretty(pretty bool) *Forecast { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Duration Refer to the description for the `duration` query parameter. // API name: duration func (r *Forecast) Duration(duration types.Duration) *Forecast { diff --git a/typedapi/ml/forecast/request.go b/typedapi/ml/forecast/request.go index e1f3549dfa..935e469d00 100644 --- a/typedapi/ml/forecast/request.go +++ b/typedapi/ml/forecast/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package forecast @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package forecast // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/forecast/MlForecastJobRequest.ts#L24-L87 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/forecast/MlForecastJobRequest.ts#L24-L87 type Request struct { // Duration Refer to the description for the `duration` query parameter. @@ -47,6 +47,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/forecast/response.go b/typedapi/ml/forecast/response.go index bcad368c4c..cc82a84814 100644 --- a/typedapi/ml/forecast/response.go +++ b/typedapi/ml/forecast/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package forecast // Response holds the response body struct for the package forecast // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/forecast/MlForecastJobResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/forecast/MlForecastJobResponse.ts#L22-L27 type Response struct { Acknowledged bool `json:"acknowledged"` ForecastId string `json:"forecast_id"` diff --git a/typedapi/ml/getbuckets/get_buckets.go b/typedapi/ml/getbuckets/get_buckets.go index 05258e80c7..48a5cbbe6a 100644 --- a/typedapi/ml/getbuckets/get_buckets.go +++ b/typedapi/ml/getbuckets/get_buckets.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves anomaly detection job results for one or more buckets. +// The API presents a chronological view of the records, grouped by bucket. package getbuckets import ( @@ -85,6 +86,7 @@ func NewGetBucketsFunc(tp elastictransport.Interface) NewGetBuckets { } // Retrieves anomaly detection job results for one or more buckets. +// The API presents a chronological view of the records, grouped by bucket. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-bucket.html func New(tp elastictransport.Interface) *GetBuckets { @@ -371,6 +373,50 @@ func (r *GetBuckets) Size(size int) *GetBuckets { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetBuckets) ErrorTrace(errortrace bool) *GetBuckets { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetBuckets) FilterPath(filterpaths ...string) *GetBuckets { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetBuckets) Human(human bool) *GetBuckets { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetBuckets) Pretty(pretty bool) *GetBuckets { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AnomalyScore Refer to the description for the `anomaly_score` query parameter. 
// API name: anomaly_score func (r *GetBuckets) AnomalyScore(anomalyscore types.Float64) *GetBuckets { diff --git a/typedapi/ml/getbuckets/request.go b/typedapi/ml/getbuckets/request.go index b3de644092..2bc3659feb 100644 --- a/typedapi/ml/getbuckets/request.go +++ b/typedapi/ml/getbuckets/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getbuckets @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package getbuckets // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_buckets/MlGetBucketsRequest.ts#L26-L133 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_buckets/MlGetBucketsRequest.ts#L26-L133 type Request struct { // AnomalyScore Refer to the description for the `anomaly_score` query parameter. @@ -56,6 +56,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -86,7 +87,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "anomaly_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -102,7 +103,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "desc": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -121,7 +122,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "exclude_interim": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -135,7 +136,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "expand": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/getbuckets/response.go b/typedapi/ml/getbuckets/response.go index 0c5b2ccfc4..72386705b7 100644 --- a/typedapi/ml/getbuckets/response.go +++ b/typedapi/ml/getbuckets/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getbuckets @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getbuckets // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_buckets/MlGetBucketsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_buckets/MlGetBucketsResponse.ts#L23-L28 type Response struct { Buckets []types.BucketSummary `json:"buckets"` Count int64 `json:"count"` diff --git a/typedapi/ml/getcalendarevents/get_calendar_events.go b/typedapi/ml/getcalendarevents/get_calendar_events.go index 1db90c78ef..55c453edbf 100644 --- a/typedapi/ml/getcalendarevents/get_calendar_events.go +++ b/typedapi/ml/getcalendarevents/get_calendar_events.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves information about the scheduled events in calendars. package getcalendarevents @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -265,7 +264,7 @@ func (r GetCalendarEvents) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -345,3 +344,47 @@ func (r *GetCalendarEvents) Start(datetime string) *GetCalendarEvents { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetCalendarEvents) ErrorTrace(errortrace bool) *GetCalendarEvents { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetCalendarEvents) FilterPath(filterpaths ...string) *GetCalendarEvents { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetCalendarEvents) Human(human bool) *GetCalendarEvents { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetCalendarEvents) Pretty(pretty bool) *GetCalendarEvents { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/getcalendarevents/response.go b/typedapi/ml/getcalendarevents/response.go index a2f9d04f7d..344c041066 100644 --- a/typedapi/ml/getcalendarevents/response.go +++ b/typedapi/ml/getcalendarevents/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
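The IsSuccess helper whose body draining was just switched from ioutil.Discard to io.Discard reports only the HTTP outcome, which is enough for a reachability-style check. A sketch follows, assuming the generated NewGetCalendarEventsFunc constructor (not shown in these hunks) takes the required calendar id like the other endpoints; the calendar id and start time are placeholders.

package mlexamples

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/getcalendarevents"
)

// canListCalendarEvents reports whether the events request for the calendar
// returned a 2xx status; it does not decode the events themselves.
func canListCalendarEvents(ctx context.Context, tp elastictransport.Interface) (bool, error) {
	return getcalendarevents.NewGetCalendarEventsFunc(tp)("my-calendar"). // placeholder calendar id
		Start("2024-06-01T00:00:00Z"). // placeholder start time
		IsSuccess(ctx)
}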
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getcalendarevents @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getcalendarevents // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_calendar_events/MlGetCalendarEventsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_calendar_events/MlGetCalendarEventsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Events []types.CalendarEvent `json:"events"` diff --git a/typedapi/ml/getcalendars/get_calendars.go b/typedapi/ml/getcalendars/get_calendars.go index 00c56a6633..f3e344d8d7 100644 --- a/typedapi/ml/getcalendars/get_calendars.go +++ b/typedapi/ml/getcalendars/get_calendars.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves configuration information for calendars. package getcalendars @@ -341,6 +341,50 @@ func (r *GetCalendars) Size(size int) *GetCalendars { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetCalendars) ErrorTrace(errortrace bool) *GetCalendars { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetCalendars) FilterPath(filterpaths ...string) *GetCalendars { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetCalendars) Human(human bool) *GetCalendars { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetCalendars) Pretty(pretty bool) *GetCalendars { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Page This object is supported only when you omit the calendar identifier. // API name: page func (r *GetCalendars) Page(page *types.Page) *GetCalendars { diff --git a/typedapi/ml/getcalendars/request.go b/typedapi/ml/getcalendars/request.go index 0253ba1c97..98f3aa2631 100644 --- a/typedapi/ml/getcalendars/request.go +++ b/typedapi/ml/getcalendars/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getcalendars @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getcalendars // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_calendars/MlGetCalendarsRequest.ts#L25-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_calendars/MlGetCalendarsRequest.ts#L25-L51 type Request struct { // Page This object is supported only when you omit the calendar identifier. @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/getcalendars/response.go b/typedapi/ml/getcalendars/response.go index da6e4bcf54..d11394ad0d 100644 --- a/typedapi/ml/getcalendars/response.go +++ b/typedapi/ml/getcalendars/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getcalendars @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getcalendars // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_calendars/MlGetCalendarsResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_calendars/MlGetCalendarsResponse.ts#L23-L25 type Response struct { Calendars []types.Calendar `json:"calendars"` Count int64 `json:"count"` diff --git a/typedapi/ml/getcategories/get_categories.go b/typedapi/ml/getcategories/get_categories.go index b80173e7c1..a1a63d57d2 100644 --- a/typedapi/ml/getcategories/get_categories.go +++ b/typedapi/ml/getcategories/get_categories.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves anomaly detection job results for one or more categories. package getcategories @@ -382,6 +382,50 @@ func (r *GetCategories) Size(size int) *GetCategories { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetCategories) ErrorTrace(errortrace bool) *GetCategories { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetCategories) FilterPath(filterpaths ...string) *GetCategories { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetCategories) Human(human bool) *GetCategories { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetCategories) Pretty(pretty bool) *GetCategories { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Page Configures pagination. // This parameter has the `from` and `size` properties. // API name: page diff --git a/typedapi/ml/getcategories/request.go b/typedapi/ml/getcategories/request.go index 07a1cd00df..81d1357640 100644 --- a/typedapi/ml/getcategories/request.go +++ b/typedapi/ml/getcategories/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getcategories @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getcategories // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_categories/MlGetCategoriesRequest.ts#L25-L70 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_categories/MlGetCategoriesRequest.ts#L25-L70 type Request struct { // Page Configures pagination. @@ -40,6 +40,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/getcategories/response.go b/typedapi/ml/getcategories/response.go index e5dca0ef2c..0562e8e0ab 100644 --- a/typedapi/ml/getcategories/response.go +++ b/typedapi/ml/getcategories/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getcategories @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getcategories // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_categories/MlGetCategoriesResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_categories/MlGetCategoriesResponse.ts#L23-L28 type Response struct { Categories []types.Category `json:"categories"` Count int64 `json:"count"` diff --git a/typedapi/ml/getdatafeeds/get_datafeeds.go b/typedapi/ml/getdatafeeds/get_datafeeds.go index 19ee742d0b..d7a80ecf7f 100644 --- a/typedapi/ml/getdatafeeds/get_datafeeds.go +++ b/typedapi/ml/getdatafeeds/get_datafeeds.go @@ -16,9 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves configuration information for datafeeds. +// You can get information for multiple datafeeds in a single API request by +// using a comma-separated list of datafeeds or a wildcard expression. You can +// get information for all datafeeds by using `_all`, by specifying `*` as the +// ``, or by omitting the ``. +// This API returns a maximum of 10,000 datafeeds. package getdatafeeds import ( @@ -27,7 +32,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,6 +80,11 @@ func NewGetDatafeedsFunc(tp elastictransport.Interface) NewGetDatafeeds { } // Retrieves configuration information for datafeeds. +// You can get information for multiple datafeeds in a single API request by +// using a comma-separated list of datafeeds or a wildcard expression. You can +// get information for all datafeeds by using `_all`, by specifying `*` as the +// ``, or by omitting the ``. +// This API returns a maximum of 10,000 datafeeds. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed.html func New(tp elastictransport.Interface) *GetDatafeeds { @@ -268,7 +277,7 @@ func (r GetDatafeeds) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -333,3 +342,47 @@ func (r *GetDatafeeds) ExcludeGenerated(excludegenerated bool) *GetDatafeeds { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetDatafeeds) ErrorTrace(errortrace bool) *GetDatafeeds { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetDatafeeds) FilterPath(filterpaths ...string) *GetDatafeeds { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetDatafeeds) Human(human bool) *GetDatafeeds { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetDatafeeds) Pretty(pretty bool) *GetDatafeeds { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/getdatafeeds/response.go b/typedapi/ml/getdatafeeds/response.go index d448f65c7d..9f0ecb52ff 100644 --- a/typedapi/ml/getdatafeeds/response.go +++ b/typedapi/ml/getdatafeeds/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getdatafeeds @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdatafeeds // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_datafeeds/MlGetDatafeedsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_datafeeds/MlGetDatafeedsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Datafeeds []types.MLDatafeed `json:"datafeeds"` diff --git a/typedapi/ml/getdatafeedstats/get_datafeed_stats.go b/typedapi/ml/getdatafeedstats/get_datafeed_stats.go index ce7d65c5b4..a1eabbafa9 100644 --- a/typedapi/ml/getdatafeedstats/get_datafeed_stats.go +++ b/typedapi/ml/getdatafeedstats/get_datafeed_stats.go @@ -16,9 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves usage information for datafeeds. +// You can get statistics for multiple datafeeds in a single API request by +// using a comma-separated list of datafeeds or a wildcard expression. You can +// get statistics for all datafeeds by using `_all`, by specifying `*` as the +// ``, or by omitting the ``. If the datafeed is stopped, the +// only information you receive is the `datafeed_id` and the `state`. +// This API returns a maximum of 10,000 datafeeds. package getdatafeedstats import ( @@ -27,7 +33,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,6 +81,12 @@ func NewGetDatafeedStatsFunc(tp elastictransport.Interface) NewGetDatafeedStats } // Retrieves usage information for datafeeds. +// You can get statistics for multiple datafeeds in a single API request by +// using a comma-separated list of datafeeds or a wildcard expression. You can +// get statistics for all datafeeds by using `_all`, by specifying `*` as the +// ``, or by omitting the ``. If the datafeed is stopped, the +// only information you receive is the `datafeed_id` and the `state`. +// This API returns a maximum of 10,000 datafeeds. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed-stats.html func New(tp elastictransport.Interface) *GetDatafeedStats { @@ -272,7 +283,7 @@ func (r GetDatafeedStats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -327,3 +338,47 @@ func (r *GetDatafeedStats) AllowNoMatch(allownomatch bool) *GetDatafeedStats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetDatafeedStats) ErrorTrace(errortrace bool) *GetDatafeedStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *GetDatafeedStats) FilterPath(filterpaths ...string) *GetDatafeedStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetDatafeedStats) Human(human bool) *GetDatafeedStats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetDatafeedStats) Pretty(pretty bool) *GetDatafeedStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/getdatafeedstats/response.go b/typedapi/ml/getdatafeedstats/response.go index de56706010..77a18866b3 100644 --- a/typedapi/ml/getdatafeedstats/response.go +++ b/typedapi/ml/getdatafeedstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getdatafeedstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdatafeedstats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_datafeed_stats/MlGetDatafeedStatsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_datafeed_stats/MlGetDatafeedStatsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Datafeeds []types.DatafeedStats `json:"datafeeds"` diff --git a/typedapi/ml/getdataframeanalytics/get_data_frame_analytics.go b/typedapi/ml/getdataframeanalytics/get_data_frame_analytics.go index ff922bdecf..210f6f5ace 100644 --- a/typedapi/ml/getdataframeanalytics/get_data_frame_analytics.go +++ b/typedapi/ml/getdataframeanalytics/get_data_frame_analytics.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves configuration information for data frame analytics jobs. +// You can get information for multiple data frame analytics jobs in a single +// API request by using a comma-separated list of data frame analytics jobs or a +// wildcard expression. package getdataframeanalytics import ( @@ -27,7 +30,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,6 +78,9 @@ func NewGetDataFrameAnalyticsFunc(tp elastictransport.Interface) NewGetDataFrame } // Retrieves configuration information for data frame analytics jobs. +// You can get information for multiple data frame analytics jobs in a single +// API request by using a comma-separated list of data frame analytics jobs or a +// wildcard expression. 
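A sketch of listing every data frame analytics config: the id is optional for this endpoint, so the plain New constructor targets all configurations, and ExcludeGenerated (added just below) strips system-generated fields so a config can be reused elsewhere; Do is assumed as the execution method.

package mlexamples

import (
	"context"
	"log"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/getdataframeanalytics"
)

// listAnalyticsConfigs retrieves all data frame analytics configurations;
// omitting the optional id means no filtering by job name or wildcard.
func listAnalyticsConfigs(ctx context.Context, tp elastictransport.Interface) error {
	res, err := getdataframeanalytics.New(tp).
		ExcludeGenerated(true). // omit system-generated fields from each config
		Do(ctx)
	if err != nil {
		return err
	}
	log.Printf("found %d data frame analytics configs", res.Count)
	return nil
}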
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-dfanalytics.html func New(tp elastictransport.Interface) *GetDataFrameAnalytics { @@ -272,7 +277,7 @@ func (r GetDataFrameAnalytics) IsSuccess(providedCtx context.Context) (bool, err if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -354,3 +359,47 @@ func (r *GetDataFrameAnalytics) ExcludeGenerated(excludegenerated bool) *GetData return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetDataFrameAnalytics) ErrorTrace(errortrace bool) *GetDataFrameAnalytics { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetDataFrameAnalytics) FilterPath(filterpaths ...string) *GetDataFrameAnalytics { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetDataFrameAnalytics) Human(human bool) *GetDataFrameAnalytics { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetDataFrameAnalytics) Pretty(pretty bool) *GetDataFrameAnalytics { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/getdataframeanalytics/response.go b/typedapi/ml/getdataframeanalytics/response.go index a9adcd3f40..d0d739f429 100644 --- a/typedapi/ml/getdataframeanalytics/response.go +++ b/typedapi/ml/getdataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getdataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_data_frame_analytics/MlGetDataFrameAnalyticsResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_data_frame_analytics/MlGetDataFrameAnalyticsResponse.ts#L23-L29 type Response struct { Count int `json:"count"` // DataFrameAnalytics An array of data frame analytics job resources, which are sorted by the id diff --git a/typedapi/ml/getdataframeanalyticsstats/get_data_frame_analytics_stats.go b/typedapi/ml/getdataframeanalyticsstats/get_data_frame_analytics_stats.go index 83dda5f195..9751fc72ef 100644 --- a/typedapi/ml/getdataframeanalyticsstats/get_data_frame_analytics_stats.go +++ b/typedapi/ml/getdataframeanalyticsstats/get_data_frame_analytics_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves usage information for data frame analytics jobs. package getdataframeanalyticsstats @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -276,7 +275,7 @@ func (r GetDataFrameAnalyticsStats) IsSuccess(providedCtx context.Context) (bool if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -356,3 +355,47 @@ func (r *GetDataFrameAnalyticsStats) Verbose(verbose bool) *GetDataFrameAnalytic return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetDataFrameAnalyticsStats) ErrorTrace(errortrace bool) *GetDataFrameAnalyticsStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetDataFrameAnalyticsStats) FilterPath(filterpaths ...string) *GetDataFrameAnalyticsStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetDataFrameAnalyticsStats) Human(human bool) *GetDataFrameAnalyticsStats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *GetDataFrameAnalyticsStats) Pretty(pretty bool) *GetDataFrameAnalyticsStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/getdataframeanalyticsstats/response.go b/typedapi/ml/getdataframeanalyticsstats/response.go index 6a3f684f00..ce94e9965a 100644 --- a/typedapi/ml/getdataframeanalyticsstats/response.go +++ b/typedapi/ml/getdataframeanalyticsstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getdataframeanalyticsstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getdataframeanalyticsstats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_data_frame_analytics_stats/MlGetDataFrameAnalyticsStatsResponse.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_data_frame_analytics_stats/MlGetDataFrameAnalyticsStatsResponse.ts#L24-L30 type Response struct { Count int64 `json:"count"` // DataFrameAnalytics An array of objects that contain usage information for data frame analytics diff --git a/typedapi/ml/getfilters/get_filters.go b/typedapi/ml/getfilters/get_filters.go index 09e433cf1d..139a180b66 100644 --- a/typedapi/ml/getfilters/get_filters.go +++ b/typedapi/ml/getfilters/get_filters.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves filters. +// You can get a single filter or all filters. package getfilters import ( @@ -27,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,6 +76,7 @@ func NewGetFiltersFunc(tp elastictransport.Interface) NewGetFilters { } // Retrieves filters. +// You can get a single filter or all filters. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-filter.html func New(tp elastictransport.Interface) *GetFilters { @@ -268,7 +269,7 @@ func (r GetFilters) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -320,3 +321,47 @@ func (r *GetFilters) Size(size int) *GetFilters { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetFilters) ErrorTrace(errortrace bool) *GetFilters { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *GetFilters) FilterPath(filterpaths ...string) *GetFilters { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetFilters) Human(human bool) *GetFilters { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetFilters) Pretty(pretty bool) *GetFilters { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/getfilters/response.go b/typedapi/ml/getfilters/response.go index d5be84d595..98e664c845 100644 --- a/typedapi/ml/getfilters/response.go +++ b/typedapi/ml/getfilters/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getfilters @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getfilters // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_filters/MlGetFiltersResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_filters/MlGetFiltersResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Filters []types.MLFilter `json:"filters"` diff --git a/typedapi/ml/getinfluencers/get_influencers.go b/typedapi/ml/getinfluencers/get_influencers.go index 215b3b32a7..4857032568 100644 --- a/typedapi/ml/getinfluencers/get_influencers.go +++ b/typedapi/ml/getinfluencers/get_influencers.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves anomaly detection job results for one or more influencers. +// Influencers are the entities that have contributed to, or are to blame for, +// the anomalies. Influencer results are available only if an +// `influencer_field_name` is specified in the job configuration. package getinfluencers import ( @@ -82,6 +85,9 @@ func NewGetInfluencersFunc(tp elastictransport.Interface) NewGetInfluencers { } // Retrieves anomaly detection job results for one or more influencers. +// Influencers are the entities that have contributed to, or are to blame for, +// the anomalies. Influencer results are available only if an +// `influencer_field_name` is specified in the job configuration. 
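To round off the influencer description above, a sketch that lists influencer results recorded after a given timestamp: the job id and timestamp are placeholders, the Start setter appears further down in this file's hunks, and Do is assumed as the execution method.

package mlexamples

import (
	"context"
	"log"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/getinfluencers"
)

// listInfluencers retrieves influencer results after a start time for a job
// that has `influencer_field_name` set in its configuration.
func listInfluencers(ctx context.Context, tp elastictransport.Interface) error {
	res, err := getinfluencers.NewGetInfluencersFunc(tp)("my-anomaly-job"). // placeholder job id
		Start("2024-06-01T00:00:00Z"). // placeholder timestamp
		Do(ctx)
	if err != nil {
		return err
	}
	log.Printf("influencers returned: %d", res.Count)
	return nil
}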
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-influencer.html func New(tp elastictransport.Interface) *GetInfluencers { @@ -389,6 +395,50 @@ func (r *GetInfluencers) Start(datetime string) *GetInfluencers { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetInfluencers) ErrorTrace(errortrace bool) *GetInfluencers { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetInfluencers) FilterPath(filterpaths ...string) *GetInfluencers { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetInfluencers) Human(human bool) *GetInfluencers { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetInfluencers) Pretty(pretty bool) *GetInfluencers { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Page Configures pagination. // This parameter has the `from` and `size` properties. // API name: page diff --git a/typedapi/ml/getinfluencers/request.go b/typedapi/ml/getinfluencers/request.go index dbb2946af7..6bf6376e1a 100644 --- a/typedapi/ml/getinfluencers/request.go +++ b/typedapi/ml/getinfluencers/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getinfluencers @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package getinfluencers // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_influencers/MlGetInfluencersRequest.ts#L26-L97 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_influencers/MlGetInfluencersRequest.ts#L26-L97 type Request struct { // Page Configures pagination. @@ -40,6 +40,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/getinfluencers/response.go b/typedapi/ml/getinfluencers/response.go index be0f402e5f..73321438c0 100644 --- a/typedapi/ml/getinfluencers/response.go +++ b/typedapi/ml/getinfluencers/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
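
// NOTE (editorial sketch, not part of the generated diff): the error_trace,
// filter_path, human and pretty helpers added in this change set follow the
// same pattern on every ML request builder (get_filters and get_influencers
// above, and the remaining endpoints below). A minimal usage sketch, assuming
// a transport that satisfies elastictransport.Interface (e.g. a configured
// go-elasticsearch client) and the builders' usual Do(ctx) terminal call,
// neither of which appears in this diff:
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/getfilters"
)

// listFilters fetches up to 20 ML filters, trimming the response to the
// fields named in filter_path and keeping the JSON compact.
func listFilters(ctx context.Context, tp elastictransport.Interface) error {
	res, err := getfilters.New(tp).
		Size(20).
		FilterPath("count", "filters.filter_id"). // filter_path values are illustrative
		Human(false).
		Pretty(false).
		ErrorTrace(true).
		Do(ctx)
	if err != nil {
		return err
	}
	_ = res.Count // Response.Count and Response.Filters per getfilters/response.go above
	return nil
}
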
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getinfluencers @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getinfluencers // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_influencers/MlGetInfluencersResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_influencers/MlGetInfluencersResponse.ts#L23-L29 type Response struct { Count int64 `json:"count"` // Influencers Array of influencer objects diff --git a/typedapi/ml/getjobs/get_jobs.go b/typedapi/ml/getjobs/get_jobs.go index c429e885a6..d0f4d508ca 100644 --- a/typedapi/ml/getjobs/get_jobs.go +++ b/typedapi/ml/getjobs/get_jobs.go @@ -16,9 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves configuration information for anomaly detection jobs. +// You can get information for multiple anomaly detection jobs in a single API +// request by using a group name, a comma-separated list of jobs, or a wildcard +// expression. You can get information for all anomaly detection jobs by using +// `_all`, by specifying `*` as the ``, or by omitting the ``. package getjobs import ( @@ -27,7 +31,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,6 +79,10 @@ func NewGetJobsFunc(tp elastictransport.Interface) NewGetJobs { } // Retrieves configuration information for anomaly detection jobs. +// You can get information for multiple anomaly detection jobs in a single API +// request by using a group name, a comma-separated list of jobs, or a wildcard +// expression. You can get information for all anomaly detection jobs by using +// `_all`, by specifying `*` as the ``, or by omitting the ``. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job.html func New(tp elastictransport.Interface) *GetJobs { @@ -268,7 +275,7 @@ func (r GetJobs) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -333,3 +340,47 @@ func (r *GetJobs) ExcludeGenerated(excludegenerated bool) *GetJobs { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetJobs) ErrorTrace(errortrace bool) *GetJobs { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetJobs) FilterPath(filterpaths ...string) *GetJobs { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetJobs) Human(human bool) *GetJobs { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetJobs) Pretty(pretty bool) *GetJobs { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/getjobs/response.go b/typedapi/ml/getjobs/response.go index f8b754c5bd..c3359ac259 100644 --- a/typedapi/ml/getjobs/response.go +++ b/typedapi/ml/getjobs/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getjobs @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getjobs // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_jobs/MlGetJobsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_jobs/MlGetJobsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Jobs []types.Job `json:"jobs"` diff --git a/typedapi/ml/getjobstats/get_job_stats.go b/typedapi/ml/getjobstats/get_job_stats.go index f31017265a..27cc9d1108 100644 --- a/typedapi/ml/getjobstats/get_job_stats.go +++ b/typedapi/ml/getjobstats/get_job_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves usage information for anomaly detection jobs. package getjobstats @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -272,7 +271,7 @@ func (r GetJobStats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -328,3 +327,47 @@ func (r *GetJobStats) AllowNoMatch(allownomatch bool) *GetJobStats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetJobStats) ErrorTrace(errortrace bool) *GetJobStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetJobStats) FilterPath(filterpaths ...string) *GetJobStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
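
// NOTE (editorial sketch, not part of the generated diff): per the expanded
// get_jobs description above, calling ml.get_jobs without a job ID returns
// every anomaly detection job; a single job, group name, comma-separated
// list, wildcard expression or `_all` could be supplied instead. The Do(ctx)
// terminal call is assumed, as elsewhere in these notes.
package example

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/getjobs"
)

// countJobs lists all anomaly detection jobs and returns how many exist.
func countJobs(ctx context.Context, tp elastictransport.Interface) (int64, error) {
	res, err := getjobs.New(tp).
		ExcludeGenerated(true). // helper shown in the get_jobs.go hunk above
		Human(true).            // human-readable duration/size fields
		Do(ctx)
	if err != nil {
		return 0, err
	}
	fmt.Printf("found %d anomaly detection jobs\n", res.Count)
	return res.Count, nil
}
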
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetJobStats) Human(human bool) *GetJobStats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetJobStats) Pretty(pretty bool) *GetJobStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/getjobstats/response.go b/typedapi/ml/getjobstats/response.go index 20e3b73233..fdfc4876f7 100644 --- a/typedapi/ml/getjobstats/response.go +++ b/typedapi/ml/getjobstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getjobstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getjobstats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_job_stats/MlGetJobStatsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_job_stats/MlGetJobStatsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Jobs []types.JobStats `json:"jobs"` diff --git a/typedapi/ml/getmemorystats/get_memory_stats.go b/typedapi/ml/getmemorystats/get_memory_stats.go index 9bbc369a56..58f594f460 100644 --- a/typedapi/ml/getmemorystats/get_memory_stats.go +++ b/typedapi/ml/getmemorystats/get_memory_stats.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information on how ML is using memory. +// Get information about how machine learning jobs and trained models are using +// memory, +// on each node, both within the JVM heap, and natively, outside of the JVM. package getmemorystats import ( @@ -27,7 +29,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -75,7 +76,9 @@ func NewGetMemoryStatsFunc(tp elastictransport.Interface) NewGetMemoryStats { } } -// Returns information on how ML is using memory. +// Get information about how machine learning jobs and trained models are using +// memory, +// on each node, both within the JVM heap, and natively, outside of the JVM. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-ml-memory.html func New(tp elastictransport.Interface) *GetMemoryStats { @@ -272,7 +275,7 @@ func (r GetMemoryStats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -340,3 +343,34 @@ func (r *GetMemoryStats) Timeout(duration string) *GetMemoryStats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetMemoryStats) ErrorTrace(errortrace bool) *GetMemoryStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetMemoryStats) FilterPath(filterpaths ...string) *GetMemoryStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetMemoryStats) Pretty(pretty bool) *GetMemoryStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/getmemorystats/response.go b/typedapi/ml/getmemorystats/response.go index 03c6328ae6..c8da3ce3ef 100644 --- a/typedapi/ml/getmemorystats/response.go +++ b/typedapi/ml/getmemorystats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getmemorystats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmemorystats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_memory_stats/MlGetMemoryStatsResponse.ts#L25-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_memory_stats/MlGetMemoryStatsResponse.ts#L25-L31 type Response struct { ClusterName string `json:"cluster_name"` NodeStats types.NodeStatistics `json:"_nodes"` diff --git a/typedapi/ml/getmodelsnapshots/get_model_snapshots.go b/typedapi/ml/getmodelsnapshots/get_model_snapshots.go index ebb723fe63..ba9701a789 100644 --- a/typedapi/ml/getmodelsnapshots/get_model_snapshots.go +++ b/typedapi/ml/getmodelsnapshots/get_model_snapshots.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves information about model snapshots. package getmodelsnapshots @@ -370,6 +370,50 @@ func (r *GetModelSnapshots) Size(size int) *GetModelSnapshots { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
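
// NOTE (editorial sketch, not part of the generated diff): a small sketch for
// the reworded ml.get_memory_stats endpoint above (JVM heap vs. native memory
// per node). In this diff the builder gains only error_trace, filter_path and
// pretty; Timeout takes a string, as shown above. Do(ctx) is assumed.
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/getmemorystats"
)

// mlMemory reports cluster-wide ML memory usage, trimmed to the fields used here.
func mlMemory(ctx context.Context, tp elastictransport.Interface) (string, error) {
	res, err := getmemorystats.New(tp).
		Timeout("30s").
		FilterPath("cluster_name", "_nodes").
		Pretty(true).
		Do(ctx)
	if err != nil {
		return "", err
	}
	// ClusterName and NodeStats per getmemorystats/response.go above.
	return res.ClusterName, nil
}
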
+// API name: error_trace +func (r *GetModelSnapshots) ErrorTrace(errortrace bool) *GetModelSnapshots { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetModelSnapshots) FilterPath(filterpaths ...string) *GetModelSnapshots { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetModelSnapshots) Human(human bool) *GetModelSnapshots { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetModelSnapshots) Pretty(pretty bool) *GetModelSnapshots { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Desc Refer to the description for the `desc` query parameter. // API name: desc func (r *GetModelSnapshots) Desc(desc bool) *GetModelSnapshots { diff --git a/typedapi/ml/getmodelsnapshots/request.go b/typedapi/ml/getmodelsnapshots/request.go index 7694a99a04..2bea7b6e54 100644 --- a/typedapi/ml/getmodelsnapshots/request.go +++ b/typedapi/ml/getmodelsnapshots/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getmodelsnapshots @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package getmodelsnapshots // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_model_snapshots/MlGetModelSnapshotsRequest.ts#L26-L96 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_model_snapshots/MlGetModelSnapshotsRequest.ts#L26-L96 type Request struct { // Desc Refer to the description for the `desc` query parameter. @@ -50,6 +50,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -80,7 +81,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "desc": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/getmodelsnapshots/response.go b/typedapi/ml/getmodelsnapshots/response.go index a7411b8bb4..9fb702b2ff 100644 --- a/typedapi/ml/getmodelsnapshots/response.go +++ b/typedapi/ml/getmodelsnapshots/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getmodelsnapshots @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmodelsnapshots // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_model_snapshots/MlGetModelSnapshotsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_model_snapshots/MlGetModelSnapshotsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` ModelSnapshots []types.ModelSnapshot `json:"model_snapshots"` diff --git a/typedapi/ml/getmodelsnapshotupgradestats/get_model_snapshot_upgrade_stats.go b/typedapi/ml/getmodelsnapshotupgradestats/get_model_snapshot_upgrade_stats.go index 22104e59b7..999f0ff0d3 100644 --- a/typedapi/ml/getmodelsnapshotupgradestats/get_model_snapshot_upgrade_stats.go +++ b/typedapi/ml/getmodelsnapshotupgradestats/get_model_snapshot_upgrade_stats.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Gets stats for anomaly detection job model snapshot upgrades that are in -// progress. +// Retrieves usage information for anomaly detection job model snapshot +// upgrades. package getmodelsnapshotupgradestats import ( @@ -28,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -83,8 +82,8 @@ func NewGetModelSnapshotUpgradeStatsFunc(tp elastictransport.Interface) NewGetMo } } -// Gets stats for anomaly detection job model snapshot upgrades that are in -// progress. +// Retrieves usage information for anomaly detection job model snapshot +// upgrades. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-job-model-snapshot-upgrade-stats.html func New(tp elastictransport.Interface) *GetModelSnapshotUpgradeStats { @@ -282,7 +281,7 @@ func (r GetModelSnapshotUpgradeStats) IsSuccess(providedCtx context.Context) (bo if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -349,3 +348,47 @@ func (r *GetModelSnapshotUpgradeStats) AllowNoMatch(allownomatch bool) *GetModel return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetModelSnapshotUpgradeStats) ErrorTrace(errortrace bool) *GetModelSnapshotUpgradeStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetModelSnapshotUpgradeStats) FilterPath(filterpaths ...string) *GetModelSnapshotUpgradeStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetModelSnapshotUpgradeStats) Human(human bool) *GetModelSnapshotUpgradeStats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetModelSnapshotUpgradeStats) Pretty(pretty bool) *GetModelSnapshotUpgradeStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/getmodelsnapshotupgradestats/response.go b/typedapi/ml/getmodelsnapshotupgradestats/response.go index 5265f98c25..64d32bc9ad 100644 --- a/typedapi/ml/getmodelsnapshotupgradestats/response.go +++ b/typedapi/ml/getmodelsnapshotupgradestats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getmodelsnapshotupgradestats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getmodelsnapshotupgradestats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_model_snapshot_upgrade_stats/MlGetModelSnapshotUpgradeStatsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_model_snapshot_upgrade_stats/MlGetModelSnapshotUpgradeStatsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` ModelSnapshotUpgrades []types.ModelSnapshotUpgrade `json:"model_snapshot_upgrades"` diff --git a/typedapi/ml/getoverallbuckets/get_overall_buckets.go b/typedapi/ml/getoverallbuckets/get_overall_buckets.go index 9dddac4a3b..2142f7222c 100644 --- a/typedapi/ml/getoverallbuckets/get_overall_buckets.go +++ b/typedapi/ml/getoverallbuckets/get_overall_buckets.go @@ -16,10 +16,25 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves overall bucket results that summarize the bucket results of // multiple anomaly detection jobs. +// +// The `overall_score` is calculated by combining the scores of all the +// buckets within the overall bucket span. First, the maximum +// `anomaly_score` per anomaly detection job in the overall bucket is +// calculated. Then the `top_n` of those scores are averaged to result in +// the `overall_score`. This means that you can fine-tune the +// `overall_score` so that it is more or less sensitive to the number of +// jobs that detect an anomaly at the same time. For example, if you set +// `top_n` to `1`, the `overall_score` is the maximum bucket score in the +// overall bucket. Alternatively, if you set `top_n` to the number of jobs, +// the `overall_score` is high only when all jobs detect anomalies in that +// overall bucket. 
If you set the `bucket_span` parameter (to a value +// greater than its default), the `overall_score` is the maximum +// `overall_score` of the overall buckets that have a span equal to the +// jobs' largest bucket span. package getoverallbuckets import ( @@ -31,6 +46,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -84,6 +100,21 @@ func NewGetOverallBucketsFunc(tp elastictransport.Interface) NewGetOverallBucket // Retrieves overall bucket results that summarize the bucket results of // multiple anomaly detection jobs. // +// The `overall_score` is calculated by combining the scores of all the +// buckets within the overall bucket span. First, the maximum +// `anomaly_score` per anomaly detection job in the overall bucket is +// calculated. Then the `top_n` of those scores are averaged to result in +// the `overall_score`. This means that you can fine-tune the +// `overall_score` so that it is more or less sensitive to the number of +// jobs that detect an anomaly at the same time. For example, if you set +// `top_n` to `1`, the `overall_score` is the maximum bucket score in the +// overall bucket. Alternatively, if you set `top_n` to the number of jobs, +// the `overall_score` is high only when all jobs detect anomalies in that +// overall bucket. If you set the `bucket_span` parameter (to a value +// greater than its default), the `overall_score` is the maximum +// `overall_score` of the overall buckets that have a span equal to the +// jobs' largest bucket span. +// // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-overall-buckets.html func New(tp elastictransport.Interface) *GetOverallBuckets { r := &GetOverallBuckets{ @@ -325,6 +356,50 @@ func (r *GetOverallBuckets) _jobid(jobid string) *GetOverallBuckets { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetOverallBuckets) ErrorTrace(errortrace bool) *GetOverallBuckets { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetOverallBuckets) FilterPath(filterpaths ...string) *GetOverallBuckets { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetOverallBuckets) Human(human bool) *GetOverallBuckets { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetOverallBuckets) Pretty(pretty bool) *GetOverallBuckets { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AllowNoMatch Refer to the description for the `allow_no_match` query parameter. 
// API name: allow_no_match func (r *GetOverallBuckets) AllowNoMatch(allownomatch bool) *GetOverallBuckets { diff --git a/typedapi/ml/getoverallbuckets/request.go b/typedapi/ml/getoverallbuckets/request.go index 0f8e9d82cf..df2f1c319f 100644 --- a/typedapi/ml/getoverallbuckets/request.go +++ b/typedapi/ml/getoverallbuckets/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getoverallbuckets @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package getoverallbuckets // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_overall_buckets/MlGetOverallBucketsRequest.ts#L25-L143 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_overall_buckets/MlGetOverallBucketsRequest.ts#L25-L143 type Request struct { // AllowNoMatch Refer to the description for the `allow_no_match` query parameter. @@ -55,6 +55,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -85,7 +86,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "allow_no_match": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -109,7 +110,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "exclude_interim": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -141,7 +142,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "top_n": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/getoverallbuckets/response.go b/typedapi/ml/getoverallbuckets/response.go index fb1d4d311a..cf05f516c6 100644 --- a/typedapi/ml/getoverallbuckets/response.go +++ b/typedapi/ml/getoverallbuckets/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getoverallbuckets @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getoverallbuckets // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_overall_buckets/MlGetOverallBucketsResponse.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_overall_buckets/MlGetOverallBucketsResponse.ts#L23-L29 type Response struct { Count int64 `json:"count"` // OverallBuckets Array of overall bucket objects diff --git a/typedapi/ml/getrecords/get_records.go b/typedapi/ml/getrecords/get_records.go index ec73ad71da..7904f89b4a 100644 --- a/typedapi/ml/getrecords/get_records.go +++ b/typedapi/ml/getrecords/get_records.go @@ -16,9 +16,19 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
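
// NOTE (editorial, not part of the generated diff): a purely illustrative
// calculation of the overall_score rule described in the get_overall_buckets
// doc comment above — take each job's maximum anomaly_score within the overall
// bucket, then average the top_n of those maxima. This is plain arithmetic,
// not client code, and the job names and scores below are invented.
package example

import "sort"

// overallScore averages the topN largest per-job maximum anomaly scores.
func overallScore(maxScorePerJob map[string]float64, topN int) float64 {
	scores := make([]float64, 0, len(maxScorePerJob))
	for _, s := range maxScorePerJob {
		scores = append(scores, s)
	}
	sort.Sort(sort.Reverse(sort.Float64Slice(scores)))
	if topN > len(scores) {
		topN = len(scores)
	}
	if topN <= 0 {
		return 0
	}
	sum := 0.0
	for _, s := range scores[:topN] {
		sum += s
	}
	return sum / float64(topN)
}

// With per-job maxima {job-a: 90, job-b: 30, job-c: 10}:
//   top_n = 1 -> 90 (the single highest bucket score, as the text above notes)
//   top_n = 3 -> (90+30+10)/3 ≈ 43.3 (high only when all jobs score high together)
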
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves anomaly records for an anomaly detection job. +// Records contain the detailed analytical results. They describe the anomalous +// activity that has been identified in the input data based on the detector +// configuration. +// There can be many anomaly records depending on the characteristics and size +// of the input data. In practice, there are often too many to be able to +// manually process them. The machine learning features therefore perform a +// sophisticated aggregation of the anomaly records into buckets. +// The number of record results depends on the number of anomalies found in each +// bucket, which relates to the number of time series being modeled and the +// number of detectors. package getrecords import ( @@ -82,6 +92,16 @@ func NewGetRecordsFunc(tp elastictransport.Interface) NewGetRecords { } // Retrieves anomaly records for an anomaly detection job. +// Records contain the detailed analytical results. They describe the anomalous +// activity that has been identified in the input data based on the detector +// configuration. +// There can be many anomaly records depending on the characteristics and size +// of the input data. In practice, there are often too many to be able to +// manually process them. The machine learning features therefore perform a +// sophisticated aggregation of the anomaly records into buckets. +// The number of record results depends on the number of anomalies found in each +// bucket, which relates to the number of time series being modeled and the +// number of detectors. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-record.html func New(tp elastictransport.Interface) *GetRecords { @@ -335,6 +355,50 @@ func (r *GetRecords) Size(size int) *GetRecords { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetRecords) ErrorTrace(errortrace bool) *GetRecords { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetRecords) FilterPath(filterpaths ...string) *GetRecords { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetRecords) Human(human bool) *GetRecords { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetRecords) Pretty(pretty bool) *GetRecords { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Desc Refer to the description for the `desc` query parameter. 
// API name: desc func (r *GetRecords) Desc(desc bool) *GetRecords { diff --git a/typedapi/ml/getrecords/request.go b/typedapi/ml/getrecords/request.go index 6c72ced254..bd9082a46f 100644 --- a/typedapi/ml/getrecords/request.go +++ b/typedapi/ml/getrecords/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getrecords @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package getrecords // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_records/MlGetAnomalyRecordsRequest.ts#L26-L127 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_records/MlGetAnomalyRecordsRequest.ts#L26-L127 type Request struct { // Desc Refer to the description for the `desc` query parameter. @@ -54,6 +54,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -84,7 +85,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "desc": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,7 +104,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "exclude_interim": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -122,7 +123,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "record_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/getrecords/response.go b/typedapi/ml/getrecords/response.go index 0d5dd415ba..b731e8721e 100644 --- a/typedapi/ml/getrecords/response.go +++ b/typedapi/ml/getrecords/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getrecords @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrecords // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_records/MlGetAnomalyRecordsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_records/MlGetAnomalyRecordsResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Records []types.Anomaly `json:"records"` diff --git a/typedapi/ml/gettrainedmodels/get_trained_models.go b/typedapi/ml/gettrainedmodels/get_trained_models.go index cdc6998fe9..d71203804e 100644 --- a/typedapi/ml/gettrainedmodels/get_trained_models.go +++ b/typedapi/ml/gettrainedmodels/get_trained_models.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Retrieves configuration information for a trained inference model. +// Retrieves configuration information for a trained model. package gettrainedmodels import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,7 +75,7 @@ func NewGetTrainedModelsFunc(tp elastictransport.Interface) NewGetTrainedModels } } -// Retrieves configuration information for a trained inference model. +// Retrieves configuration information for a trained model. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-trained-models.html func New(tp elastictransport.Interface) *GetTrainedModels { @@ -269,7 +268,7 @@ func (r GetTrainedModels) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -297,7 +296,11 @@ func (r *GetTrainedModels) Header(key, value string) *GetTrainedModels { return r } -// ModelId The unique identifier of the trained model. +// ModelId The unique identifier of the trained model or a model alias. +// +// You can get information for multiple trained models in a single API +// request by using a comma-separated list of model IDs or a wildcard +// expression. // API Name: modelid func (r *GetTrainedModels) ModelId(modelid string) *GetTrainedModels { r.paramSet |= modelidMask @@ -369,8 +372,56 @@ func (r *GetTrainedModels) Size(size int) *GetTrainedModels { // none. When supplied, only trained models that contain all the supplied // tags are returned. // API name: tags -func (r *GetTrainedModels) Tags(tags string) *GetTrainedModels { - r.values.Set("tags", tags) +func (r *GetTrainedModels) Tags(tags ...string) *GetTrainedModels { + tmp := []string{} + for _, item := range tags { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("tags", strings.Join(tmp, ",")) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetTrainedModels) ErrorTrace(errortrace bool) *GetTrainedModels { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetTrainedModels) FilterPath(filterpaths ...string) *GetTrainedModels { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetTrainedModels) Human(human bool) *GetTrainedModels { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *GetTrainedModels) Pretty(pretty bool) *GetTrainedModels { + r.values.Set("pretty", strconv.FormatBool(pretty)) return r } diff --git a/typedapi/ml/gettrainedmodels/response.go b/typedapi/ml/gettrainedmodels/response.go index 418e4270d0..93cbb3b6d6 100644 --- a/typedapi/ml/gettrainedmodels/response.go +++ b/typedapi/ml/gettrainedmodels/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package gettrainedmodels @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettrainedmodels // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_trained_models/MlGetTrainedModelResponse.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_trained_models/MlGetTrainedModelResponse.ts#L23-L34 type Response struct { Count int `json:"count"` // TrainedModelConfigs An array of trained model resources, which are sorted by the model_id value diff --git a/typedapi/ml/gettrainedmodelsstats/get_trained_models_stats.go b/typedapi/ml/gettrainedmodelsstats/get_trained_models_stats.go index 3d401f0d48..7459189d00 100644 --- a/typedapi/ml/gettrainedmodelsstats/get_trained_models_stats.go +++ b/typedapi/ml/gettrainedmodelsstats/get_trained_models_stats.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Retrieves usage information for trained inference models. +// Retrieves usage information for trained models. You can get usage information +// for multiple trained +// models in a single API request by using a comma-separated list of model IDs +// or a wildcard expression. package gettrainedmodelsstats import ( @@ -27,7 +30,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -75,7 +77,10 @@ func NewGetTrainedModelsStatsFunc(tp elastictransport.Interface) NewGetTrainedMo } } -// Retrieves usage information for trained inference models. +// Retrieves usage information for trained models. You can get usage information +// for multiple trained +// models in a single API request by using a comma-separated list of model IDs +// or a wildcard expression. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-trained-models-stats.html func New(tp elastictransport.Interface) *GetTrainedModelsStats { @@ -272,7 +277,7 @@ func (r GetTrainedModelsStats) IsSuccess(providedCtx context.Context) (bool, err if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -340,3 +345,47 @@ func (r *GetTrainedModelsStats) Size(size int) *GetTrainedModelsStats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
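
// NOTE (editorial sketch, not part of the generated diff): Tags on the
// get_trained_models builder is now variadic and ModelId documents a
// comma-separated list or wildcard expression, per the changes above. The
// model IDs and tags below are invented; Do(ctx) is assumed as elsewhere.
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/gettrainedmodels"
)

// taggedModels returns how many trained models match a wildcard ID and carry
// all of the given tags (the variadic values are joined with commas).
func taggedModels(ctx context.Context, tp elastictransport.Interface) (int, error) {
	res, err := gettrainedmodels.New(tp).
		ModelId("my-dfa-*,my-regression-model").
		Tags("prod", "regression"). // previously a single string argument
		Size(100).
		Do(ctx)
	if err != nil {
		return 0, err
	}
	return res.Count, nil // Count is an int here, per gettrainedmodels/response.go above
}
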
+// API name: error_trace +func (r *GetTrainedModelsStats) ErrorTrace(errortrace bool) *GetTrainedModelsStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetTrainedModelsStats) FilterPath(filterpaths ...string) *GetTrainedModelsStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetTrainedModelsStats) Human(human bool) *GetTrainedModelsStats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetTrainedModelsStats) Pretty(pretty bool) *GetTrainedModelsStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/gettrainedmodelsstats/response.go b/typedapi/ml/gettrainedmodelsstats/response.go index e0628c5003..cb794550f8 100644 --- a/typedapi/ml/gettrainedmodelsstats/response.go +++ b/typedapi/ml/gettrainedmodelsstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package gettrainedmodelsstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettrainedmodelsstats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_trained_models_stats/MlGetTrainedModelStatsResponse.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_trained_models_stats/MlGetTrainedModelStatsResponse.ts#L23-L33 type Response struct { // Count The total number of trained model statistics that matched the requested ID diff --git a/typedapi/ml/infertrainedmodel/infer_trained_model.go b/typedapi/ml/infertrainedmodel/infer_trained_model.go index 8f53762e86..11c137188e 100644 --- a/typedapi/ml/infertrainedmodel/infer_trained_model.go +++ b/typedapi/ml/infertrainedmodel/infer_trained_model.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Evaluate a trained model. +// Evaluates a trained model. package infertrainedmodel import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +81,7 @@ func NewInferTrainedModelFunc(tp elastictransport.Interface) NewInferTrainedMode } } -// Evaluate a trained model. 
+// Evaluates a trained model. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/infer-trained-model.html func New(tp elastictransport.Interface) *InferTrainedModel { @@ -341,6 +342,50 @@ func (r *InferTrainedModel) Timeout(duration string) *InferTrainedModel { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *InferTrainedModel) ErrorTrace(errortrace bool) *InferTrainedModel { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *InferTrainedModel) FilterPath(filterpaths ...string) *InferTrainedModel { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *InferTrainedModel) Human(human bool) *InferTrainedModel { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *InferTrainedModel) Pretty(pretty bool) *InferTrainedModel { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Docs An array of objects to pass to the model for inference. The objects should // contain a fields matching your // configured trained model input. Typically, for NLP models, the field name is diff --git a/typedapi/ml/infertrainedmodel/request.go b/typedapi/ml/infertrainedmodel/request.go index afd92c398a..56d71048d8 100644 --- a/typedapi/ml/infertrainedmodel/request.go +++ b/typedapi/ml/infertrainedmodel/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package infertrainedmodel @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package infertrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/infer_trained_model/MlInferTrainedModelRequest.ts#L27-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/infer_trained_model/MlInferTrainedModelRequest.ts#L27-L59 type Request struct { // Docs An array of objects to pass to the model for inference. The objects should @@ -45,6 +45,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/infertrainedmodel/response.go b/typedapi/ml/infertrainedmodel/response.go index ecc67f973e..c04f16fba0 100644 --- a/typedapi/ml/infertrainedmodel/response.go +++ b/typedapi/ml/infertrainedmodel/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package infertrainedmodel @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package infertrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/infer_trained_model/MlInferTrainedModelResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/infer_trained_model/MlInferTrainedModelResponse.ts#L22-L26 type Response struct { InferenceResults []types.InferenceResponseResult `json:"inference_results"` } diff --git a/typedapi/ml/info/info.go b/typedapi/ml/info/info.go index 7d3d18a5d9..86c7123eac 100644 --- a/typedapi/ml/info/info.go +++ b/typedapi/ml/info/info.go @@ -16,9 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns defaults and limits used by machine learning. +// This endpoint is designed to be used by a user interface that needs to fully +// understand machine learning configurations where some options are not +// specified, meaning that the defaults should be used. This endpoint may be +// used to find out what those defaults are. It also provides information about +// the maximum size of machine learning jobs that could run in the current +// cluster configuration. package info import ( @@ -27,9 +33,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -69,6 +75,12 @@ func NewInfoFunc(tp elastictransport.Interface) NewInfo { } // Returns defaults and limits used by machine learning. +// This endpoint is designed to be used by a user interface that needs to fully +// understand machine learning configurations where some options are not +// specified, meaning that the defaults should be used. This endpoint may be +// used to find out what those defaults are. It also provides information about +// the maximum size of machine learning jobs that could run in the current +// cluster configuration. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-ml-info.html func New(tp elastictransport.Interface) *Info { @@ -248,7 +260,7 @@ func (r Info) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +287,47 @@ func (r *Info) Header(key, value string) *Info { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Info) ErrorTrace(errortrace bool) *Info { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Info) FilterPath(filterpaths ...string) *Info { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Info) Human(human bool) *Info { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Info) Pretty(pretty bool) *Info { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/info/response.go b/typedapi/ml/info/response.go index 0e2256989b..8c6eba7dcf 100644 --- a/typedapi/ml/info/response.go +++ b/typedapi/ml/info/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/info/MlInfoResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/info/MlInfoResponse.ts#L22-L29 type Response struct { Defaults types.Defaults `json:"defaults"` Limits types.Limits `json:"limits"` diff --git a/typedapi/ml/openjob/open_job.go b/typedapi/ml/openjob/open_job.go index e47398ceb3..126ea7f2f0 100644 --- a/typedapi/ml/openjob/open_job.go +++ b/typedapi/ml/openjob/open_job.go @@ -16,9 +16,16 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Opens one or more anomaly detection jobs. +// An anomaly detection job must be opened in order for it to be ready to +// receive and analyze data. It can be opened and closed multiple times +// throughout its lifecycle. +// When you open a new job, it starts with an empty model. +// When you open an existing job, the most recent model state is automatically +// loaded. The job is ready to resume its analysis from where it left off, once +// new data is received. package openjob import ( @@ -30,6 +37,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -81,6 +89,13 @@ func NewOpenJobFunc(tp elastictransport.Interface) NewOpenJob { } // Opens one or more anomaly detection jobs. +// An anomaly detection job must be opened in order for it to be ready to +// receive and analyze data. It can be opened and closed multiple times +// throughout its lifecycle. +// When you open a new job, it starts with an empty model. 
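// Illustrative sketch for the ML info endpoint documented above: it takes no
// required parameters, so info.New can be used directly. New, the query-parameter
// setters and the Defaults/Limits response fields are shown in this change; the
// Do call is assumed to behave like the other typed endpoints.
package example

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/info"
)

func printMLDefaults(tp elastictransport.Interface) error {
	res, err := info.New(tp).
		Human(true). // e.g. "1h" instead of milliseconds where applicable
		Do(context.Background())
	if err != nil {
		return err
	}
	fmt.Printf("defaults: %+v\nlimits: %+v\n", res.Defaults, res.Limits)
	return nil
}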
+// When you open an existing job, the most recent model state is automatically +// loaded. The job is ready to resume its analysis from where it left off, once +// new data is received. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-open-job.html func New(tp elastictransport.Interface) *OpenJob { @@ -316,6 +331,50 @@ func (r *OpenJob) _jobid(jobid string) *OpenJob { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *OpenJob) ErrorTrace(errortrace bool) *OpenJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *OpenJob) FilterPath(filterpaths ...string) *OpenJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *OpenJob) Human(human bool) *OpenJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *OpenJob) Pretty(pretty bool) *OpenJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Timeout Refer to the description for the `timeout` query parameter. // API name: timeout func (r *OpenJob) Timeout(duration types.Duration) *OpenJob { diff --git a/typedapi/ml/openjob/request.go b/typedapi/ml/openjob/request.go index 27e846ef03..4687386ff7 100644 --- a/typedapi/ml/openjob/request.go +++ b/typedapi/ml/openjob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package openjob @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package openjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/open_job/MlOpenJobRequest.ts#L24-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/open_job/MlOpenJobRequest.ts#L24-L59 type Request struct { // Timeout Refer to the description for the `timeout` query parameter. @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/openjob/response.go b/typedapi/ml/openjob/response.go index 838d34ef22..9e2bc76be9 100644 --- a/typedapi/ml/openjob/response.go +++ b/typedapi/ml/openjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
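// Illustrative sketch of opening a job before sending it data, per the lifecycle
// description above. NewOpenJobFunc and Timeout are shown in this change; the
// single-argument constructor form, the Do call, the job ID and passing a plain
// duration string as types.Duration are assumptions for illustration.
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/openjob"
)

func openJob(tp elastictransport.Interface) error {
	res, err := openjob.NewOpenJobFunc(tp)("my-anomaly-job").
		Timeout("30m"). // wait up to 30 minutes for the job to open
		Do(context.Background())
	if err != nil {
		return err
	}
	_ = res.Node // ID of the node the job was started on (empty in serverless)
	return nil
}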
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package openjob // Response holds the response body struct for the package openjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/open_job/MlOpenJobResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/open_job/MlOpenJobResponse.ts#L22-L31 type Response struct { // Node The ID of the node that the job was started on. In serverless this will be diff --git a/typedapi/ml/postcalendarevents/post_calendar_events.go b/typedapi/ml/postcalendarevents/post_calendar_events.go index 3d757df41d..a750c69226 100644 --- a/typedapi/ml/postcalendarevents/post_calendar_events.go +++ b/typedapi/ml/postcalendarevents/post_calendar_events.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Posts scheduled events in a calendar. +// Adds scheduled events to a calendar. package postcalendarevents import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +81,7 @@ func NewPostCalendarEventsFunc(tp elastictransport.Interface) NewPostCalendarEve } } -// Posts scheduled events in a calendar. +// Adds scheduled events to a calendar. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-calendar-event.html func New(tp elastictransport.Interface) *PostCalendarEvents { @@ -316,6 +317,50 @@ func (r *PostCalendarEvents) _calendarid(calendarid string) *PostCalendarEvents return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PostCalendarEvents) ErrorTrace(errortrace bool) *PostCalendarEvents { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PostCalendarEvents) FilterPath(filterpaths ...string) *PostCalendarEvents { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PostCalendarEvents) Human(human bool) *PostCalendarEvents { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *PostCalendarEvents) Pretty(pretty bool) *PostCalendarEvents { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Events A list of one of more scheduled events. The event’s start and end times can // be specified as integer milliseconds since the epoch or as a string in ISO // 8601 format. diff --git a/typedapi/ml/postcalendarevents/request.go b/typedapi/ml/postcalendarevents/request.go index 04896cc586..30bab222f7 100644 --- a/typedapi/ml/postcalendarevents/request.go +++ b/typedapi/ml/postcalendarevents/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package postcalendarevents @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package postcalendarevents // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/post_calendar_events/MlPostCalendarEventsRequest.ts#L24-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/post_calendar_events/MlPostCalendarEventsRequest.ts#L24-L40 type Request struct { // Events A list of one of more scheduled events. The event’s start and end times can @@ -41,6 +41,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/postcalendarevents/response.go b/typedapi/ml/postcalendarevents/response.go index 1b4732c0b1..713c9f4fdc 100644 --- a/typedapi/ml/postcalendarevents/response.go +++ b/typedapi/ml/postcalendarevents/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package postcalendarevents @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package postcalendarevents // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/post_calendar_events/MlPostCalendarEventsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/post_calendar_events/MlPostCalendarEventsResponse.ts#L22-L24 type Response struct { Events []types.CalendarEvent `json:"events"` } diff --git a/typedapi/ml/postdata/post_data.go b/typedapi/ml/postdata/post_data.go index 245f4ed60e..a23fed1172 100644 --- a/typedapi/ml/postdata/post_data.go +++ b/typedapi/ml/postdata/post_data.go @@ -16,9 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Sends data to an anomaly detection job for analysis. +// +// IMPORTANT: For each job, data can be accepted from only a single connection +// at a time. +// It is not currently possible to post data to multiple jobs using wildcards or +// a comma-separated list. 
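// Illustrative sketch of adding scheduled events to a calendar, matching the
// renamed summary above. NewPostCalendarEventsFunc is shown in this change; the
// variadic Events setter, the Do call and the calendar ID are assumptions for
// illustration.
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/postcalendarevents"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func addCalendarEvents(tp elastictransport.Interface) error {
	// Event start and end times may be epoch milliseconds or ISO 8601 strings,
	// per the Events doc comment above; populate the slice accordingly.
	events := []types.CalendarEvent{}
	_, err := postcalendarevents.NewPostCalendarEventsFunc(tp)("planned-outages").
		Events(events...).
		Do(context.Background())
	return err
}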
package postdata import ( @@ -30,6 +35,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -82,6 +88,11 @@ func NewPostDataFunc(tp elastictransport.Interface) NewPostData { // Sends data to an anomaly detection job for analysis. // +// IMPORTANT: For each job, data can be accepted from only a single connection +// at a time. +// It is not currently possible to post data to multiple jobs using wildcards or +// a comma-separated list. +// // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-post-data.html func New(tp elastictransport.Interface) *PostData { r := &PostData{ @@ -90,6 +101,8 @@ func New(tp elastictransport.Interface) *PostData { headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -330,3 +343,47 @@ func (r *PostData) ResetStart(datetime string) *PostData { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PostData) ErrorTrace(errortrace bool) *PostData { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PostData) FilterPath(filterpaths ...string) *PostData { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PostData) Human(human bool) *PostData { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PostData) Pretty(pretty bool) *PostData { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/postdata/request.go b/typedapi/ml/postdata/request.go index 06a4154df2..1c3e956554 100644 --- a/typedapi/ml/postdata/request.go +++ b/typedapi/ml/postdata/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package postdata @@ -26,5 +26,12 @@ import ( // Request holds the request body struct for the package postdata // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/post_data/MlPostJobDataRequest.ts#L24-L68 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/post_data/MlPostJobDataRequest.ts#L24-L68 type Request = []json.RawMessage + +// NewRequest returns a Request +func NewRequest() *Request { + r := make([]json.RawMessage, 0) + + return &r +} diff --git a/typedapi/ml/postdata/response.go b/typedapi/ml/postdata/response.go index 1677bc5744..1715582e44 100644 --- a/typedapi/ml/postdata/response.go +++ b/typedapi/ml/postdata/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package postdata // Response holds the response body struct for the package postdata // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/post_data/MlPostJobDataResponse.ts#L23-L41 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/post_data/MlPostJobDataResponse.ts#L23-L41 type Response struct { BucketCount int64 `json:"bucket_count"` EarliestRecordTimestamp int64 `json:"earliest_record_timestamp"` diff --git a/typedapi/ml/previewdatafeed/preview_datafeed.go b/typedapi/ml/previewdatafeed/preview_datafeed.go index 6cd4263558..162b8377a3 100644 --- a/typedapi/ml/previewdatafeed/preview_datafeed.go +++ b/typedapi/ml/previewdatafeed/preview_datafeed.go @@ -16,9 +16,22 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Previews a datafeed. +// This API returns the first "page" of search results from a datafeed. +// You can preview an existing datafeed or provide configuration details for a +// datafeed +// and anomaly detection job in the API. The preview shows the structure of the +// data +// that will be passed to the anomaly detection engine. +// IMPORTANT: When Elasticsearch security features are enabled, the preview uses +// the credentials of the user that +// called the API. However, when the datafeed starts it uses the roles of the +// last user that created or updated the +// datafeed. To get a preview that accurately reflects the behavior of the +// datafeed, use the appropriate credentials. +// You can also use secondary authorization headers to supply the credentials. package previewdatafeed import ( @@ -30,6 +43,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -79,6 +93,19 @@ func NewPreviewDatafeedFunc(tp elastictransport.Interface) NewPreviewDatafeed { } // Previews a datafeed. 
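// Illustrative sketch of posting raw documents to an opened job. NewPostDataFunc,
// NewRequest and ResetStart are shown in this change; the Request setter, the Do
// call, the job ID and the sample document are assumptions for illustration.
// Note the IMPORTANT caveat above: only one connection may post to a job at a
// time, and wildcards or comma-separated job IDs are not supported.
package example

import (
	"context"
	"encoding/json"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/postdata"
)

func postDocs(tp elastictransport.Interface) error {
	body := postdata.NewRequest() // *[]json.RawMessage, as added above
	*body = append(*body, json.RawMessage(`{"timestamp":1718000000000,"bytes":512}`))

	_, err := postdata.NewPostDataFunc(tp)("my-anomaly-job").
		ResetStart("2024-06-10T00:00:00Z").
		Request(body).
		Do(context.Background())
	return err
}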
+// This API returns the first "page" of search results from a datafeed. +// You can preview an existing datafeed or provide configuration details for a +// datafeed +// and anomaly detection job in the API. The preview shows the structure of the +// data +// that will be passed to the anomaly detection engine. +// IMPORTANT: When Elasticsearch security features are enabled, the preview uses +// the credentials of the user that +// called the API. However, when the datafeed starts it uses the roles of the +// last user that created or updated the +// datafeed. To get a preview that accurately reflects the behavior of the +// datafeed, use the appropriate credentials. +// You can also use secondary authorization headers to supply the credentials. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-preview-datafeed.html func New(tp elastictransport.Interface) *PreviewDatafeed { @@ -345,6 +372,50 @@ func (r *PreviewDatafeed) End(datetime string) *PreviewDatafeed { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PreviewDatafeed) ErrorTrace(errortrace bool) *PreviewDatafeed { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PreviewDatafeed) FilterPath(filterpaths ...string) *PreviewDatafeed { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PreviewDatafeed) Human(human bool) *PreviewDatafeed { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PreviewDatafeed) Pretty(pretty bool) *PreviewDatafeed { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // DatafeedConfig The datafeed definition to preview. // API name: datafeed_config func (r *PreviewDatafeed) DatafeedConfig(datafeedconfig *types.DatafeedConfig) *PreviewDatafeed { diff --git a/typedapi/ml/previewdatafeed/request.go b/typedapi/ml/previewdatafeed/request.go index fcd5356e11..0024becb46 100644 --- a/typedapi/ml/previewdatafeed/request.go +++ b/typedapi/ml/previewdatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
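// Illustrative sketch of previewing an existing datafeed over a bounded time
// range, per the expanded description above. End is shown in this change; Start,
// the DatafeedId setter, the Do call and the datafeed ID are assumptions for
// illustration. As noted above, the preview runs with the calling user's
// credentials, not those stored with the datafeed.
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/previewdatafeed"
)

func previewFeed(tp elastictransport.Interface) error {
	res, err := previewdatafeed.New(tp).
		DatafeedId("datafeed-my-anomaly-job").
		Start("2024-06-01T00:00:00Z").
		End("2024-06-02T00:00:00Z").
		Do(context.Background())
	if err != nil {
		return err
	}
	_ = res // Response is []json.RawMessage: the first "page" of documents
	return nil
}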
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package previewdatafeed @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package previewdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/preview_datafeed/MlPreviewDatafeedRequest.ts#L26-L69 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/preview_datafeed/MlPreviewDatafeedRequest.ts#L26-L69 type Request struct { // DatafeedConfig The datafeed definition to preview. @@ -48,6 +48,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/previewdatafeed/response.go b/typedapi/ml/previewdatafeed/response.go index 09176fa6ac..a885e69b1d 100644 --- a/typedapi/ml/previewdatafeed/response.go +++ b/typedapi/ml/previewdatafeed/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package previewdatafeed @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package previewdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/preview_datafeed/MlPreviewDatafeedResponse.ts#L20-L23 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/preview_datafeed/MlPreviewDatafeedResponse.ts#L20-L23 type Response []json.RawMessage diff --git a/typedapi/ml/previewdataframeanalytics/preview_data_frame_analytics.go b/typedapi/ml/previewdataframeanalytics/preview_data_frame_analytics.go index e12f0dbc24..3046206ad7 100644 --- a/typedapi/ml/previewdataframeanalytics/preview_data_frame_analytics.go +++ b/typedapi/ml/previewdataframeanalytics/preview_data_frame_analytics.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Previews that will be analyzed given a data frame analytics config. +// Previews the extracted features used by a data frame analytics config. package previewdataframeanalytics import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -78,7 +79,7 @@ func NewPreviewDataFrameAnalyticsFunc(tp elastictransport.Interface) NewPreviewD } } -// Previews that will be analyzed given a data frame analytics config. +// Previews the extracted features used by a data frame analytics config. 
// // http://www.elastic.co/guide/en/elasticsearch/reference/current/preview-dfanalytics.html func New(tp elastictransport.Interface) *PreviewDataFrameAnalytics { @@ -327,6 +328,50 @@ func (r *PreviewDataFrameAnalytics) Id(id string) *PreviewDataFrameAnalytics { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PreviewDataFrameAnalytics) ErrorTrace(errortrace bool) *PreviewDataFrameAnalytics { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PreviewDataFrameAnalytics) FilterPath(filterpaths ...string) *PreviewDataFrameAnalytics { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PreviewDataFrameAnalytics) Human(human bool) *PreviewDataFrameAnalytics { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PreviewDataFrameAnalytics) Pretty(pretty bool) *PreviewDataFrameAnalytics { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Config A data frame analytics config as described in create data frame analytics // jobs. Note that `id` and `dest` don’t need to be provided in the context of // this API. diff --git a/typedapi/ml/previewdataframeanalytics/request.go b/typedapi/ml/previewdataframeanalytics/request.go index a89e4f759a..8e8478fe72 100644 --- a/typedapi/ml/previewdataframeanalytics/request.go +++ b/typedapi/ml/previewdataframeanalytics/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
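// Illustrative sketch of previewing the extracted features for an existing data
// frame analytics config, matching the clarified summary above. The Id setter and
// the FeatureValues response field are shown in this change; the Do call and the
// job ID are assumptions for illustration.
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/previewdataframeanalytics"
)

func previewFeatures(tp elastictransport.Interface) error {
	res, err := previewdataframeanalytics.New(tp).
		Id("my-dfa-job").
		Do(context.Background())
	if err != nil {
		return err
	}
	_ = res.FeatureValues // feature name/value pairs that would be analyzed
	return nil
}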
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package previewdataframeanalytics @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package previewdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/preview_data_frame_analytics/MlPreviewDataFrameAnalyticsRequest.ts#L24-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/preview_data_frame_analytics/MlPreviewDataFrameAnalyticsRequest.ts#L24-L47 type Request struct { // Config A data frame analytics config as described in create data frame analytics @@ -41,6 +41,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/previewdataframeanalytics/response.go b/typedapi/ml/previewdataframeanalytics/response.go index da9fef0125..0533d305f5 100644 --- a/typedapi/ml/previewdataframeanalytics/response.go +++ b/typedapi/ml/previewdataframeanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package previewdataframeanalytics // Response holds the response body struct for the package previewdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/preview_data_frame_analytics/MlPreviewDataFrameAnalyticsResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/preview_data_frame_analytics/MlPreviewDataFrameAnalyticsResponse.ts#L23-L28 type Response struct { // FeatureValues An array of objects that contain feature name and value pairs. The features diff --git a/typedapi/ml/putcalendar/put_calendar.go b/typedapi/ml/putcalendar/put_calendar.go index 013c899330..e06d1edb3d 100644 --- a/typedapi/ml/putcalendar/put_calendar.go +++ b/typedapi/ml/putcalendar/put_calendar.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Instantiates a calendar. +// Creates a calendar. package putcalendar import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +81,7 @@ func NewPutCalendarFunc(tp elastictransport.Interface) NewPutCalendar { } } -// Instantiates a calendar. +// Creates a calendar. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-calendar.html func New(tp elastictransport.Interface) *PutCalendar { @@ -314,6 +315,50 @@ func (r *PutCalendar) _calendarid(calendarid string) *PutCalendar { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *PutCalendar) ErrorTrace(errortrace bool) *PutCalendar { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutCalendar) FilterPath(filterpaths ...string) *PutCalendar { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutCalendar) Human(human bool) *PutCalendar { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutCalendar) Pretty(pretty bool) *PutCalendar { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Description A description of the calendar. // API name: description func (r *PutCalendar) Description(description string) *PutCalendar { diff --git a/typedapi/ml/putcalendar/request.go b/typedapi/ml/putcalendar/request.go index cf99459199..f9edfeae7e 100644 --- a/typedapi/ml/putcalendar/request.go +++ b/typedapi/ml/putcalendar/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putcalendar @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package putcalendar // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_calendar/MlPutCalendarRequest.ts#L23-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_calendar/MlPutCalendarRequest.ts#L23-L43 type Request struct { // Description A description of the calendar. @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/putcalendar/response.go b/typedapi/ml/putcalendar/response.go index 0085e3a27c..916e9d491b 100644 --- a/typedapi/ml/putcalendar/response.go +++ b/typedapi/ml/putcalendar/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
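// Illustrative sketch of creating a calendar, matching the "Creates a calendar"
// summary above. NewPutCalendarFunc and Description are shown in this change;
// the Do call and the calendar ID are assumptions for illustration.
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/putcalendar"
)

func createCalendar(tp elastictransport.Interface) error {
	_, err := putcalendar.NewPutCalendarFunc(tp)("planned-outages").
		Description("Scheduled maintenance windows").
		Do(context.Background())
	return err
}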
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putcalendar @@ -31,7 +31,7 @@ import ( // Response holds the response body struct for the package putcalendar // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_calendar/MlPutCalendarResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_calendar/MlPutCalendarResponse.ts#L22-L31 type Response struct { // CalendarId A string that uniquely identifies a calendar. diff --git a/typedapi/ml/putcalendarjob/put_calendar_job.go b/typedapi/ml/putcalendarjob/put_calendar_job.go index ab7ef02b4f..9446e5da04 100644 --- a/typedapi/ml/putcalendarjob/put_calendar_job.go +++ b/typedapi/ml/putcalendarjob/put_calendar_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Adds an anomaly detection job to a calendar. package putcalendarjob @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -275,7 +275,7 @@ func (r PutCalendarJob) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -321,3 +321,47 @@ func (r *PutCalendarJob) _jobid(jobid string) *PutCalendarJob { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutCalendarJob) ErrorTrace(errortrace bool) *PutCalendarJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutCalendarJob) FilterPath(filterpaths ...string) *PutCalendarJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutCalendarJob) Human(human bool) *PutCalendarJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *PutCalendarJob) Pretty(pretty bool) *PutCalendarJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/putcalendarjob/response.go b/typedapi/ml/putcalendarjob/response.go index 536c6421de..f96c06ed04 100644 --- a/typedapi/ml/putcalendarjob/response.go +++ b/typedapi/ml/putcalendarjob/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putcalendarjob @@ -31,7 +31,7 @@ import ( // Response holds the response body struct for the package putcalendarjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_calendar_job/MlPutCalendarJobResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_calendar_job/MlPutCalendarJobResponse.ts#L22-L31 type Response struct { // CalendarId A string that uniquely identifies a calendar. diff --git a/typedapi/ml/putdatafeed/put_datafeed.go b/typedapi/ml/putdatafeed/put_datafeed.go index 9ba5536b23..a4efeff5c9 100644 --- a/typedapi/ml/putdatafeed/put_datafeed.go +++ b/typedapi/ml/putdatafeed/put_datafeed.go @@ -16,9 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Instantiates a datafeed. +// Datafeeds retrieve data from Elasticsearch for analysis by an anomaly +// detection job. +// You can associate only one datafeed with each anomaly detection job. +// The datafeed contains a query that runs at a defined interval (`frequency`). +// If you are concerned about delayed data, you can add a delay (`query_delay') +// at each interval. +// When Elasticsearch security features are enabled, your datafeed remembers +// which roles the user who created it had +// at the time of creation and runs the query using those same roles. If you +// provide secondary authorization headers, +// those credentials are used instead. +// You must use Kibana, this API, or the create anomaly detection jobs API to +// create a datafeed. Do not add a datafeed +// directly to the `.ml-config` index. Do not give users `write` privileges on +// the `.ml-config` index. package putdatafeed import ( @@ -83,6 +98,21 @@ func NewPutDatafeedFunc(tp elastictransport.Interface) NewPutDatafeed { } // Instantiates a datafeed. +// Datafeeds retrieve data from Elasticsearch for analysis by an anomaly +// detection job. +// You can associate only one datafeed with each anomaly detection job. +// The datafeed contains a query that runs at a defined interval (`frequency`). +// If you are concerned about delayed data, you can add a delay (`query_delay') +// at each interval. +// When Elasticsearch security features are enabled, your datafeed remembers +// which roles the user who created it had +// at the time of creation and runs the query using those same roles. If you +// provide secondary authorization headers, +// those credentials are used instead. 
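// Illustrative sketch of linking an anomaly detection job to a calendar and
// checking the outcome with IsSuccess, whose body handling now uses io.Discard
// as shown above. The two-argument constructor form and the IDs are assumptions
// for illustration.
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/putcalendarjob"
)

func linkJobToCalendar(tp elastictransport.Interface) (bool, error) {
	return putcalendarjob.NewPutCalendarJobFunc(tp)("planned-outages", "my-anomaly-job").
		IsSuccess(context.Background())
}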
+// You must use Kibana, this API, or the create anomaly detection jobs API to +// create a datafeed. Do not add a datafeed +// directly to the `.ml-config` index. Do not give users `write` privileges on +// the `.ml-config` index. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-datafeed.html func New(tp elastictransport.Interface) *PutDatafeed { @@ -360,6 +390,50 @@ func (r *PutDatafeed) IgnoreUnavailable(ignoreunavailable bool) *PutDatafeed { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutDatafeed) ErrorTrace(errortrace bool) *PutDatafeed { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutDatafeed) FilterPath(filterpaths ...string) *PutDatafeed { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutDatafeed) Human(human bool) *PutDatafeed { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutDatafeed) Pretty(pretty bool) *PutDatafeed { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Aggregations If set, the datafeed performs aggregation searches. // Support for aggregations is limited and should be used only with low // cardinality data. diff --git a/typedapi/ml/putdatafeed/request.go b/typedapi/ml/putdatafeed/request.go index d84d29d01f..5c06dda230 100644 --- a/typedapi/ml/putdatafeed/request.go +++ b/typedapi/ml/putdatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putdatafeed @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_datafeed/MlPutDatafeedRequest.ts#L37-L172 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_datafeed/MlPutDatafeedRequest.ts#L37-L172 type Request struct { // Aggregations If set, the datafeed performs aggregation searches. 
@@ -121,6 +121,7 @@ func NewRequest() *Request { Aggregations: make(map[string]types.Aggregations, 0), ScriptFields: make(map[string]types.ScriptField, 0), } + return r } @@ -206,7 +207,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_empty_searches": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -245,7 +246,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "scroll_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/putdatafeed/response.go b/typedapi/ml/putdatafeed/response.go index 682703a108..6f89da8b9e 100644 --- a/typedapi/ml/putdatafeed/response.go +++ b/typedapi/ml/putdatafeed/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putdatafeed @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_datafeed/MlPutDatafeedResponse.ts#L31-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_datafeed/MlPutDatafeedResponse.ts#L31-L49 type Response struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Authorization *types.DatafeedAuthorization `json:"authorization,omitempty"` diff --git a/typedapi/ml/putdataframeanalytics/put_data_frame_analytics.go b/typedapi/ml/putdataframeanalytics/put_data_frame_analytics.go index 1a69d99ece..96b69bb870 100644 --- a/typedapi/ml/putdataframeanalytics/put_data_frame_analytics.go +++ b/typedapi/ml/putdataframeanalytics/put_data_frame_analytics.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Instantiates a data frame analytics job. +// This API creates a data frame analytics job that performs an analysis on the +// source indices and stores the outcome in a destination index. package putdataframeanalytics import ( @@ -30,6 +32,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -81,6 +84,8 @@ func NewPutDataFrameAnalyticsFunc(tp elastictransport.Interface) NewPutDataFrame } // Instantiates a data frame analytics job. +// This API creates a data frame analytics job that performs an analysis on the +// source indices and stores the outcome in a destination index. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-dfanalytics.html func New(tp elastictransport.Interface) *PutDataFrameAnalytics { @@ -318,6 +323,50 @@ func (r *PutDataFrameAnalytics) _id(id string) *PutDataFrameAnalytics { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
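// Illustrative sketch of the datafeed creation call described above. Only setters
// visible in this change (IgnoreUnavailable and the common query parameters) are
// used; the body setters for indices, query and job_id are omitted and would be
// filled in from the request documentation. The constructor form, the Do call and
// the datafeed ID are assumptions for illustration.
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/putdatafeed"
)

func createDatafeed(tp elastictransport.Interface) error {
	_, err := putdatafeed.NewPutDatafeedFunc(tp)("datafeed-my-anomaly-job").
		IgnoreUnavailable(true). // tolerate missing or closed indices at creation time
		Pretty(true).
		Do(context.Background())
	return err
}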
+// API name: error_trace +func (r *PutDataFrameAnalytics) ErrorTrace(errortrace bool) *PutDataFrameAnalytics { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutDataFrameAnalytics) FilterPath(filterpaths ...string) *PutDataFrameAnalytics { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutDataFrameAnalytics) Human(human bool) *PutDataFrameAnalytics { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutDataFrameAnalytics) Pretty(pretty bool) *PutDataFrameAnalytics { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AllowLazyStart Specifies whether this job can start when there is insufficient machine // learning node capacity for it to be immediately assigned to a node. If // set to `false` and a machine learning node with capacity to run the job diff --git a/typedapi/ml/putdataframeanalytics/request.go b/typedapi/ml/putdataframeanalytics/request.go index 2814ef5b5b..5458fecf78 100644 --- a/typedapi/ml/putdataframeanalytics/request.go +++ b/typedapi/ml/putdataframeanalytics/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putdataframeanalytics @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_data_frame_analytics/MlPutDataFrameAnalyticsRequest.ts#L30-L141 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_data_frame_analytics/MlPutDataFrameAnalyticsRequest.ts#L30-L141 type Request struct { // AllowLazyStart Specifies whether this job can start when there is insufficient machine @@ -102,6 +102,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -132,7 +133,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "allow_lazy_start": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -179,7 +180,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_num_threads": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/putdataframeanalytics/response.go b/typedapi/ml/putdataframeanalytics/response.go index 3cf5a1fcf9..83ca6ba4be 100644 --- a/typedapi/ml/putdataframeanalytics/response.go +++ b/typedapi/ml/putdataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putdataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_data_frame_analytics/MlPutDataFrameAnalyticsResponse.ts#L31-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_data_frame_analytics/MlPutDataFrameAnalyticsResponse.ts#L31-L46 type Response struct { AllowLazyStart bool `json:"allow_lazy_start"` Analysis types.DataframeAnalysisContainer `json:"analysis"` diff --git a/typedapi/ml/putfilter/put_filter.go b/typedapi/ml/putfilter/put_filter.go index e9353f9b22..eb7e4869c5 100644 --- a/typedapi/ml/putfilter/put_filter.go +++ b/typedapi/ml/putfilter/put_filter.go @@ -16,9 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Instantiates a filter. +// A filter contains a list of strings. It can be used by one or more anomaly +// detection jobs. +// Specifically, filters are referenced in the `custom_rules` property of +// detector configuration objects. 
package putfilter import ( @@ -30,6 +34,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -81,6 +86,10 @@ func NewPutFilterFunc(tp elastictransport.Interface) NewPutFilter { } // Instantiates a filter. +// A filter contains a list of strings. It can be used by one or more anomaly +// detection jobs. +// Specifically, filters are referenced in the `custom_rules` property of +// detector configuration objects. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-filter.html func New(tp elastictransport.Interface) *PutFilter { @@ -314,6 +323,50 @@ func (r *PutFilter) _filterid(filterid string) *PutFilter { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutFilter) ErrorTrace(errortrace bool) *PutFilter { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutFilter) FilterPath(filterpaths ...string) *PutFilter { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutFilter) Human(human bool) *PutFilter { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutFilter) Pretty(pretty bool) *PutFilter { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Description A description of the filter. // API name: description func (r *PutFilter) Description(description string) *PutFilter { diff --git a/typedapi/ml/putfilter/request.go b/typedapi/ml/putfilter/request.go index 83b972866d..85e408a55f 100644 --- a/typedapi/ml/putfilter/request.go +++ b/typedapi/ml/putfilter/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putfilter @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package putfilter // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_filter/MlPutFilterRequest.ts#L23-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_filter/MlPutFilterRequest.ts#L23-L50 type Request struct { // Description A description of the filter. 
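// Illustrative sketch of creating a filter whose strings can then be referenced
// from the custom_rules property of detector configurations, per the expanded
// description above. NewPutFilterFunc and Description are shown in this change;
// the variadic Items setter, the Do call and the filter ID are assumptions for
// illustration.
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/putfilter"
)

func createSafeDomainsFilter(tp elastictransport.Interface) error {
	_, err := putfilter.NewPutFilterFunc(tp)("safe_domains").
		Description("Domains that should not trigger anomalies").
		Items("example.com", "internal.example.org").
		Do(context.Background())
	return err
}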
@@ -41,6 +41,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/putfilter/response.go b/typedapi/ml/putfilter/response.go index 793f26f97c..edd5ff763f 100644 --- a/typedapi/ml/putfilter/response.go +++ b/typedapi/ml/putfilter/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putfilter // Response holds the response body struct for the package putfilter // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_filter/MlPutFilterResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_filter/MlPutFilterResponse.ts#L22-L28 type Response struct { Description string `json:"description"` FilterId string `json:"filter_id"` diff --git a/typedapi/ml/putjob/put_job.go b/typedapi/ml/putjob/put_job.go index ab5915535c..a2c26c0ad2 100644 --- a/typedapi/ml/putjob/put_job.go +++ b/typedapi/ml/putjob/put_job.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Instantiates an anomaly detection job. +// Instantiates an anomaly detection job. If you include a `datafeed_config`, +// you must have read index privileges on the source index. package putjob import ( @@ -30,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +82,8 @@ func NewPutJobFunc(tp elastictransport.Interface) NewPutJob { } } -// Instantiates an anomaly detection job. +// Instantiates an anomaly detection job. If you include a `datafeed_config`, +// you must have read index privileges on the source index. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-put-job.html func New(tp elastictransport.Interface) *PutJob { @@ -316,6 +319,50 @@ func (r *PutJob) _jobid(jobid string) *PutJob { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutJob) ErrorTrace(errortrace bool) *PutJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutJob) FilterPath(filterpaths ...string) *PutJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *PutJob) Human(human bool) *PutJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutJob) Pretty(pretty bool) *PutJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AllowLazyOpen Advanced configuration option. Specifies whether this job can open when there // is insufficient machine learning node capacity for it to be immediately // assigned to a node. By default, if a machine learning node with capacity to diff --git a/typedapi/ml/putjob/request.go b/typedapi/ml/putjob/request.go index 0ec8646443..e88cacdd5c 100644 --- a/typedapi/ml/putjob/request.go +++ b/typedapi/ml/putjob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putjob @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_job/MlPutJobRequest.ts#L30-L111 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_job/MlPutJobRequest.ts#L30-L111 type Request struct { // AllowLazyOpen Advanced configuration option. Specifies whether this job can open when there @@ -120,6 +120,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -150,7 +151,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "allow_lazy_open": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -184,7 +185,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "daily_model_snapshot_retention_after_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -231,7 +232,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "model_snapshot_retention_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -246,7 +247,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "renormalization_window_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -266,7 +267,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "results_retention_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/putjob/response.go b/typedapi/ml/putjob/response.go index 89afc08e00..3b04dbfbe2 100644 --- a/typedapi/ml/putjob/response.go +++ b/typedapi/ml/putjob/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putjob @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package putjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_job/MlPutJobResponse.ts#L29-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_job/MlPutJobResponse.ts#L29-L52 type Response struct { AllowLazyOpen bool `json:"allow_lazy_open"` AnalysisConfig types.AnalysisConfigRead `json:"analysis_config"` diff --git a/typedapi/ml/puttrainedmodel/put_trained_model.go b/typedapi/ml/puttrainedmodel/put_trained_model.go index a0c1a48bf1..862a148a25 100644 --- a/typedapi/ml/puttrainedmodel/put_trained_model.go +++ b/typedapi/ml/puttrainedmodel/put_trained_model.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates an inference trained model. +// Enables you to supply a trained model that is not created by data frame +// analytics. package puttrainedmodel import ( @@ -82,7 +83,8 @@ func NewPutTrainedModelFunc(tp elastictransport.Interface) NewPutTrainedModel { } } -// Creates an inference trained model. +// Enables you to supply a trained model that is not created by data frame +// analytics. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-trained-models.html func New(tp elastictransport.Interface) *PutTrainedModel { @@ -316,8 +318,9 @@ func (r *PutTrainedModel) _modelid(modelid string) *PutTrainedModel { return r } -// DeferDefinitionDecompression If set to `true` and a `compressed_definition` is provided, the request -// defers definition decompression and skips relevant validations. +// DeferDefinitionDecompression If set to `true` and a `compressed_definition` is provided, +// the request defers definition decompression and skips relevant +// validations. // API name: defer_definition_decompression func (r *PutTrainedModel) DeferDefinitionDecompression(deferdefinitiondecompression bool) *PutTrainedModel { r.values.Set("defer_definition_decompression", strconv.FormatBool(deferdefinitiondecompression)) @@ -325,6 +328,59 @@ func (r *PutTrainedModel) DeferDefinitionDecompression(deferdefinitiondecompress return r } +// WaitForCompletion Whether to wait for all child operations (e.g. model download) +// to complete. +// API name: wait_for_completion +func (r *PutTrainedModel) WaitForCompletion(waitforcompletion bool) *PutTrainedModel { + r.values.Set("wait_for_completion", strconv.FormatBool(waitforcompletion)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutTrainedModel) ErrorTrace(errortrace bool) *PutTrainedModel { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
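The reworded DeferDefinitionDecompression and the new WaitForCompletion parameter are easiest to see together. This is a sketch under the same assumptions as above (ML namespace, Do(ctx)); a real request would also carry inference_config and input, which are omitted here for brevity.

```go
package mlexamples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

// uploadCompressedModel is a sketch: compressedDef is a GZipped, Base64-encoded
// model definition produced elsewhere (for example by a model export tool).
func uploadCompressedModel(ctx context.Context, es *elasticsearch.TypedClient, compressedDef string) error {
	_, err := es.ML.PutTrainedModel("my-regression-model").
		DeferDefinitionDecompression(true). // skip decompression and the related validations on PUT
		WaitForCompletion(true).            // new in this release: block until child operations finish
		CompressedDefinition(compressedDef).
		Do(ctx)
	return err
}
```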
+// API name: filter_path +func (r *PutTrainedModel) FilterPath(filterpaths ...string) *PutTrainedModel { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutTrainedModel) Human(human bool) *PutTrainedModel { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutTrainedModel) Pretty(pretty bool) *PutTrainedModel { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // CompressedDefinition The compressed (GZipped and Base64 encoded) inference definition of the // model. If compressed_definition is specified, then definition cannot be // specified. @@ -382,7 +438,7 @@ func (r *PutTrainedModel) Input(input *types.Input) *PutTrainedModel { // metadata should be a json.RawMessage or a structure // if a structure is provided, the client will defer a json serialization // prior to sending the payload to Elasticsearch. -func (r *PutTrainedModel) Metadata(metadata interface{}) *PutTrainedModel { +func (r *PutTrainedModel) Metadata(metadata any) *PutTrainedModel { switch casted := metadata.(type) { case json.RawMessage: r.req.Metadata = casted diff --git a/typedapi/ml/puttrainedmodel/request.go b/typedapi/ml/puttrainedmodel/request.go index df33e98f80..6d38a61aea 100644 --- a/typedapi/ml/puttrainedmodel/request.go +++ b/typedapi/ml/puttrainedmodel/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package puttrainedmodel @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package puttrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/MlPutTrainedModelRequest.ts#L29-L113 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/MlPutTrainedModelRequest.ts#L29-L124 type Request struct { // CompressedDefinition The compressed (GZipped and Base64 encoded) inference definition of the @@ -77,6 +77,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/puttrainedmodel/response.go b/typedapi/ml/puttrainedmodel/response.go index 44780ee0b7..b0412d4246 100644 --- a/typedapi/ml/puttrainedmodel/response.go +++ b/typedapi/ml/puttrainedmodel/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package puttrainedmodel @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package puttrainedmodel // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/MlPutTrainedModelResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/MlPutTrainedModelResponse.ts#L22-L24 type Response struct { CompressedDefinition *string `json:"compressed_definition,omitempty"` // CreateTime The time when the trained model was created. diff --git a/typedapi/ml/puttrainedmodelalias/put_trained_model_alias.go b/typedapi/ml/puttrainedmodelalias/put_trained_model_alias.go index d3ff7dc49d..40f393da3c 100644 --- a/typedapi/ml/puttrainedmodelalias/put_trained_model_alias.go +++ b/typedapi/ml/puttrainedmodelalias/put_trained_model_alias.go @@ -16,10 +16,24 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Creates a new model alias (or reassigns an existing one) to refer to the -// trained model +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Creates or updates a trained model alias. A trained model alias is a logical +// name used to reference a single trained model. +// You can use aliases instead of trained model identifiers to make it easier to +// reference your models. For example, you can use aliases in inference +// aggregations and processors. +// An alias must be unique and refer to only a single trained model. However, +// you can have multiple aliases for each trained model. +// If you use this API to update an alias such that it references a different +// trained model ID and the model uses a different type of data frame analytics, +// an error occurs. For example, this situation occurs if you have a trained +// model for regression analysis and a trained model for classification +// analysis; you cannot reassign an alias from one type of trained model to +// another. +// If you use this API to update an alias and there are very few input fields in +// common between the old and new trained models for the model alias, the API +// returns a warning. package puttrainedmodelalias import ( @@ -28,7 +42,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -83,8 +96,22 @@ func NewPutTrainedModelAliasFunc(tp elastictransport.Interface) NewPutTrainedMod } } -// Creates a new model alias (or reassigns an existing one) to refer to the -// trained model +// Creates or updates a trained model alias. A trained model alias is a logical +// name used to reference a single trained model. +// You can use aliases instead of trained model identifiers to make it easier to +// reference your models. For example, you can use aliases in inference +// aggregations and processors. +// An alias must be unique and refer to only a single trained model. However, +// you can have multiple aliases for each trained model. 
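Given the expanded description of alias reassignment, here is a small sketch of the reassign flow using only methods visible in this file (Reassign and IsSuccess); the builder is assumed to come from the client's ML namespace with the alias and target model ID already set as path parameters.

```go
package mlexamples

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/puttrainedmodelalias"
)

// reassignAlias moves an existing alias to a new model. req is assumed to be
// built via the client's ML namespace with the alias and model ID already set.
func reassignAlias(ctx context.Context, req *puttrainedmodelalias.PutTrainedModelAlias) error {
	ok, err := req.
		Reassign(true). // required when the alias already points at another model
		IsSuccess(ctx)
	if err != nil {
		return err
	}
	if !ok {
		return fmt.Errorf("alias reassignment was not acknowledged")
	}
	return nil
}
```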
+// If you use this API to update an alias such that it references a different +// trained model ID and the model uses a different type of data frame analytics, +// an error occurs. For example, this situation occurs if you have a trained +// model for regression analysis and a trained model for classification +// analysis; you cannot reassign an alias from one type of trained model to +// another. +// If you use this API to update an alias and there are very few input fields in +// common between the old and new trained models for the model alias, the API +// returns a warning. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-trained-models-aliases.html func New(tp elastictransport.Interface) *PutTrainedModelAlias { @@ -284,7 +311,7 @@ func (r PutTrainedModelAlias) IsSuccess(providedCtx context.Context) (bool, erro if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -339,3 +366,47 @@ func (r *PutTrainedModelAlias) Reassign(reassign bool) *PutTrainedModelAlias { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutTrainedModelAlias) ErrorTrace(errortrace bool) *PutTrainedModelAlias { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutTrainedModelAlias) FilterPath(filterpaths ...string) *PutTrainedModelAlias { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutTrainedModelAlias) Human(human bool) *PutTrainedModelAlias { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutTrainedModelAlias) Pretty(pretty bool) *PutTrainedModelAlias { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/puttrainedmodelalias/response.go b/typedapi/ml/puttrainedmodelalias/response.go index 7afda2b902..4a10319e48 100644 --- a/typedapi/ml/puttrainedmodelalias/response.go +++ b/typedapi/ml/puttrainedmodelalias/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package puttrainedmodelalias // Response holds the response body struct for the package puttrainedmodelalias // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model_alias/MlPutTrainedModelAliasResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model_alias/MlPutTrainedModelAliasResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/puttrainedmodeldefinitionpart/put_trained_model_definition_part.go b/typedapi/ml/puttrainedmodeldefinitionpart/put_trained_model_definition_part.go index c847227a42..d8c681ee38 100644 --- a/typedapi/ml/puttrainedmodeldefinitionpart/put_trained_model_definition_part.go +++ b/typedapi/ml/puttrainedmodeldefinitionpart/put_trained_model_definition_part.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates part of a trained model definition +// Creates part of a trained model definition. package puttrainedmodeldefinitionpart import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -85,7 +86,7 @@ func NewPutTrainedModelDefinitionPartFunc(tp elastictransport.Interface) NewPutT } } -// Creates part of a trained model definition +// Creates part of a trained model definition. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-trained-model-definition-part.html func New(tp elastictransport.Interface) *PutTrainedModelDefinitionPart { @@ -339,6 +340,50 @@ func (r *PutTrainedModelDefinitionPart) _part(part string) *PutTrainedModelDefin return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutTrainedModelDefinitionPart) ErrorTrace(errortrace bool) *PutTrainedModelDefinitionPart { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutTrainedModelDefinitionPart) FilterPath(filterpaths ...string) *PutTrainedModelDefinitionPart { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *PutTrainedModelDefinitionPart) Human(human bool) *PutTrainedModelDefinitionPart { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutTrainedModelDefinitionPart) Pretty(pretty bool) *PutTrainedModelDefinitionPart { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Definition The definition part for the model. Must be a base64 encoded string. // API name: definition func (r *PutTrainedModelDefinitionPart) Definition(definition string) *PutTrainedModelDefinitionPart { diff --git a/typedapi/ml/puttrainedmodeldefinitionpart/request.go b/typedapi/ml/puttrainedmodeldefinitionpart/request.go index c5c1ba58cd..e8e711468e 100644 --- a/typedapi/ml/puttrainedmodeldefinitionpart/request.go +++ b/typedapi/ml/puttrainedmodeldefinitionpart/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package puttrainedmodeldefinitionpart @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package puttrainedmodeldefinitionpart // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model_definition_part/MlPutTrainedModelDefinitionPartRequest.ts#L24-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model_definition_part/MlPutTrainedModelDefinitionPartRequest.ts#L24-L57 type Request struct { // Definition The definition part for the model. Must be a base64 encoded string. @@ -41,6 +41,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/puttrainedmodeldefinitionpart/response.go b/typedapi/ml/puttrainedmodeldefinitionpart/response.go index 041dcc5539..76879642f7 100644 --- a/typedapi/ml/puttrainedmodeldefinitionpart/response.go +++ b/typedapi/ml/puttrainedmodeldefinitionpart/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package puttrainedmodeldefinitionpart // Response holds the response body struct for the package puttrainedmodeldefinitionpart // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model_definition_part/MlPutTrainedModelDefinitionPartResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model_definition_part/MlPutTrainedModelDefinitionPartResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/ml/puttrainedmodelvocabulary/put_trained_model_vocabulary.go b/typedapi/ml/puttrainedmodelvocabulary/put_trained_model_vocabulary.go index 689a519fd2..d9b63b8591 100644 --- a/typedapi/ml/puttrainedmodelvocabulary/put_trained_model_vocabulary.go +++ b/typedapi/ml/puttrainedmodelvocabulary/put_trained_model_vocabulary.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates a trained model vocabulary +// Creates a trained model vocabulary. +// This API is supported only for natural language processing (NLP) models. +// The vocabulary is stored in the index as described in +// `inference_config.*.vocabulary` of the trained model definition. package puttrainedmodelvocabulary import ( @@ -30,6 +33,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +84,10 @@ func NewPutTrainedModelVocabularyFunc(tp elastictransport.Interface) NewPutTrain } } -// Creates a trained model vocabulary +// Creates a trained model vocabulary. +// This API is supported only for natural language processing (NLP) models. +// The vocabulary is stored in the index as described in +// `inference_config.*.vocabulary` of the trained model definition. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-trained-model-vocabulary.html func New(tp elastictransport.Interface) *PutTrainedModelVocabulary { @@ -316,6 +323,50 @@ func (r *PutTrainedModelVocabulary) _modelid(modelid string) *PutTrainedModelVoc return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutTrainedModelVocabulary) ErrorTrace(errortrace bool) *PutTrainedModelVocabulary { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutTrainedModelVocabulary) FilterPath(filterpaths ...string) *PutTrainedModelVocabulary { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutTrainedModelVocabulary) Human(human bool) *PutTrainedModelVocabulary { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutTrainedModelVocabulary) Pretty(pretty bool) *PutTrainedModelVocabulary { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Merges The optional model merges if required by the tokenizer. 
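A sketch of loading a vocabulary for an NLP model follows. Merges and the common query parameters are the only setters visible in this hunk; the Vocabulary setter is assumed to mirror the request's required vocabulary field.

```go
package mlexamples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

// putVocabulary is a sketch for the NLP-only vocabulary endpoint.
func putVocabulary(ctx context.Context, es *elasticsearch.TypedClient) error {
	_, err := es.ML.PutTrainedModelVocabulary("my-ner-model").
		Vocabulary("[PAD]", "[UNK]", "elastic", "##search"). // assumed setter for the required vocabulary field
		Merges("e l", "el astic").                           // optional, only if the tokenizer needs merges
		Do(ctx)
	return err
}
```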
// API name: merges func (r *PutTrainedModelVocabulary) Merges(merges ...string) *PutTrainedModelVocabulary { diff --git a/typedapi/ml/puttrainedmodelvocabulary/request.go b/typedapi/ml/puttrainedmodelvocabulary/request.go index eec6d21b16..8619bb61ca 100644 --- a/typedapi/ml/puttrainedmodelvocabulary/request.go +++ b/typedapi/ml/puttrainedmodelvocabulary/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package puttrainedmodelvocabulary @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package puttrainedmodelvocabulary // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model_vocabulary/MlPutTrainedModelVocabularyRequest.ts#L24-L60 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model_vocabulary/MlPutTrainedModelVocabularyRequest.ts#L24-L60 type Request struct { // Merges The optional model merges if required by the tokenizer. @@ -43,6 +43,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/puttrainedmodelvocabulary/response.go b/typedapi/ml/puttrainedmodelvocabulary/response.go index 7ec5586a4d..0c3b262d07 100644 --- a/typedapi/ml/puttrainedmodelvocabulary/response.go +++ b/typedapi/ml/puttrainedmodelvocabulary/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package puttrainedmodelvocabulary // Response holds the response body struct for the package puttrainedmodelvocabulary // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model_vocabulary/MlPutTrainedModelVocabularyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model_vocabulary/MlPutTrainedModelVocabularyResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/resetjob/reset_job.go b/typedapi/ml/resetjob/reset_job.go index 5ee576e8d2..362eff5f79 100644 --- a/typedapi/ml/resetjob/reset_job.go +++ b/typedapi/ml/resetjob/reset_job.go @@ -16,9 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Resets an existing anomaly detection job. +// Resets an anomaly detection job. +// All model state and results are deleted. The job is ready to start over as if +// it had just been created. +// It is not currently possible to reset multiple jobs using wildcards or a +// comma separated list. 
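Because wildcards and comma-separated lists are not supported, a reset has to be issued per job. A sketch using the DeleteUserAnnotations and IsSuccess methods shown below, under the usual typed-client assumptions:

```go
package mlexamples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

// resetJob wipes model state and results for one job. Wildcards and
// comma-separated lists are not supported, so call it once per job ID.
func resetJob(ctx context.Context, es *elasticsearch.TypedClient, jobID string) (bool, error) {
	return es.ML.ResetJob(jobID).
		DeleteUserAnnotations(true). // also drop annotations added by users, not just system ones
		IsSuccess(ctx)
}
```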
package resetjob import ( @@ -27,7 +31,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -77,7 +80,11 @@ func NewResetJobFunc(tp elastictransport.Interface) NewResetJob { } } -// Resets an existing anomaly detection job. +// Resets an anomaly detection job. +// All model state and results are deleted. The job is ready to start over as if +// it had just been created. +// It is not currently possible to reset multiple jobs using wildcards or a +// comma separated list. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-reset-job.html func New(tp elastictransport.Interface) *ResetJob { @@ -265,7 +272,7 @@ func (r ResetJob) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -321,3 +328,47 @@ func (r *ResetJob) DeleteUserAnnotations(deleteuserannotations bool) *ResetJob { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ResetJob) ErrorTrace(errortrace bool) *ResetJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ResetJob) FilterPath(filterpaths ...string) *ResetJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ResetJob) Human(human bool) *ResetJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ResetJob) Pretty(pretty bool) *ResetJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/resetjob/response.go b/typedapi/ml/resetjob/response.go index f4f9d4bc97..ac66688a45 100644 --- a/typedapi/ml/resetjob/response.go +++ b/typedapi/ml/resetjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package resetjob // Response holds the response body struct for the package resetjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/reset_job/MlResetJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/reset_job/MlResetJobResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/ml/revertmodelsnapshot/request.go b/typedapi/ml/revertmodelsnapshot/request.go index 7ff35122df..9954e01ffb 100644 --- a/typedapi/ml/revertmodelsnapshot/request.go +++ b/typedapi/ml/revertmodelsnapshot/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package revertmodelsnapshot @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package revertmodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/revert_model_snapshot/MlRevertModelSnapshotRequest.ts#L23-L69 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/revert_model_snapshot/MlRevertModelSnapshotRequest.ts#L23-L69 type Request struct { // DeleteInterveningResults Refer to the description for the `delete_intervening_results` query @@ -38,6 +38,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/revertmodelsnapshot/response.go b/typedapi/ml/revertmodelsnapshot/response.go index 7380d91b85..d5d1aa0e59 100644 --- a/typedapi/ml/revertmodelsnapshot/response.go +++ b/typedapi/ml/revertmodelsnapshot/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package revertmodelsnapshot @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package revertmodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/revert_model_snapshot/MlRevertModelSnapshotResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/revert_model_snapshot/MlRevertModelSnapshotResponse.ts#L22-L24 type Response struct { Model types.ModelSnapshot `json:"model"` } diff --git a/typedapi/ml/revertmodelsnapshot/revert_model_snapshot.go b/typedapi/ml/revertmodelsnapshot/revert_model_snapshot.go index a1d2a0f146..70836b57f7 100644 --- a/typedapi/ml/revertmodelsnapshot/revert_model_snapshot.go +++ b/typedapi/ml/revertmodelsnapshot/revert_model_snapshot.go @@ -16,9 +16,16 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Reverts to a specific snapshot. +// The machine learning features react quickly to anomalous input, learning new +// behaviors in data. Highly anomalous input increases the variance in the +// models whilst the system learns whether this is a new step-change in behavior +// or a one-off event. In the case where this anomalous input is known to be a +// one-off, then it might be appropriate to reset the model state to a time +// before this event. 
For example, you might consider reverting to a saved +// snapshot after Black Friday or a critical system failure. package revertmodelsnapshot import ( @@ -30,6 +37,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -86,6 +94,13 @@ func NewRevertModelSnapshotFunc(tp elastictransport.Interface) NewRevertModelSna } // Reverts to a specific snapshot. +// The machine learning features react quickly to anomalous input, learning new +// behaviors in data. Highly anomalous input increases the variance in the +// models whilst the system learns whether this is a new step-change in behavior +// or a one-off event. In the case where this anomalous input is known to be a +// one-off, then it might be appropriate to reset the model state to a time +// before this event. For example, you might consider reverting to a saved +// snapshot after Black Friday or a critical system failure. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-revert-snapshot.html func New(tp elastictransport.Interface) *RevertModelSnapshot { @@ -340,6 +355,50 @@ func (r *RevertModelSnapshot) _snapshotid(snapshotid string) *RevertModelSnapsho return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *RevertModelSnapshot) ErrorTrace(errortrace bool) *RevertModelSnapshot { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *RevertModelSnapshot) FilterPath(filterpaths ...string) *RevertModelSnapshot { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *RevertModelSnapshot) Human(human bool) *RevertModelSnapshot { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *RevertModelSnapshot) Pretty(pretty bool) *RevertModelSnapshot { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // DeleteInterveningResults Refer to the description for the `delete_intervening_results` query // parameter. // API name: delete_intervening_results diff --git a/typedapi/ml/setupgrademode/response.go b/typedapi/ml/setupgrademode/response.go index a34c6b2e21..3dff70eaab 100644 --- a/typedapi/ml/setupgrademode/response.go +++ b/typedapi/ml/setupgrademode/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
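A sketch of reverting to a snapshot after such a one-off event, assuming the typed client's ML namespace and that the path parameters follow the URL order (job ID, then snapshot ID); DeleteInterveningResults is the body setter shown further down.

```go
package mlexamples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

// revertSnapshot rolls a job back to a known-good snapshot, for example after
// a one-off anomalous period. Argument order (job ID, then snapshot ID) is
// assumed from the URL layout.
func revertSnapshot(ctx context.Context, es *elasticsearch.TypedClient) error {
	_, err := es.ML.RevertModelSnapshot("my_job", "1575402237").
		DeleteInterveningResults(true). // discard results generated after the snapshot's timestamp
		Do(ctx)
	return err
}
```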
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package setupgrademode // Response holds the response body struct for the package setupgrademode // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/set_upgrade_mode/MlSetUpgradeModeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/set_upgrade_mode/MlSetUpgradeModeResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/setupgrademode/set_upgrade_mode.go b/typedapi/ml/setupgrademode/set_upgrade_mode.go index 75f7444677..824d24df20 100644 --- a/typedapi/ml/setupgrademode/set_upgrade_mode.go +++ b/typedapi/ml/setupgrademode/set_upgrade_mode.go @@ -16,10 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Sets a cluster wide upgrade_mode setting that prepares machine learning // indices for an upgrade. +// When upgrading your cluster, in some circumstances you must restart your +// nodes and reindex your machine learning indices. In those circumstances, +// there must be no machine learning jobs running. You can close the machine +// learning jobs, do the upgrade, then open all the jobs again. Alternatively, +// you can use this API to temporarily halt tasks associated with the jobs and +// datafeeds and prevent new jobs from opening. You can also use this API +// during upgrades that do not require you to reindex your machine learning +// indices, though stopping jobs is not a requirement in that case. +// You can see the current value for the upgrade_mode setting by using the get +// machine learning info API. package setupgrademode import ( @@ -28,7 +38,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -72,6 +81,16 @@ func NewSetUpgradeModeFunc(tp elastictransport.Interface) NewSetUpgradeMode { // Sets a cluster wide upgrade_mode setting that prepares machine learning // indices for an upgrade. +// When upgrading your cluster, in some circumstances you must restart your +// nodes and reindex your machine learning indices. In those circumstances, +// there must be no machine learning jobs running. You can close the machine +// learning jobs, do the upgrade, then open all the jobs again. Alternatively, +// you can use this API to temporarily halt tasks associated with the jobs and +// datafeeds and prevent new jobs from opening. You can also use this API +// during upgrades that do not require you to reindex your machine learning +// indices, though stopping jobs is not a requirement in that case. +// You can see the current value for the upgrade_mode setting by using the get +// machine learning info API. 
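A sketch of toggling upgrade_mode around maintenance. Timeout and IsSuccess appear in this file; the Enabled helper is assumed from the API's enabled query parameter and is not part of this hunk.

```go
package mlexamples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

// pauseMLForUpgrade flips the cluster-wide upgrade_mode flag before maintenance
// and back again afterwards.
func pauseMLForUpgrade(ctx context.Context, es *elasticsearch.TypedClient, enable bool) (bool, error) {
	return es.ML.SetUpgradeMode().
		Enabled(enable). // assumed helper for the enabled query parameter
		Timeout("10m").
		IsSuccess(ctx)
}
```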
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-set-upgrade-mode.html func New(tp elastictransport.Interface) *SetUpgradeMode { @@ -251,7 +270,7 @@ func (r SetUpgradeMode) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -296,3 +315,47 @@ func (r *SetUpgradeMode) Timeout(duration string) *SetUpgradeMode { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *SetUpgradeMode) ErrorTrace(errortrace bool) *SetUpgradeMode { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SetUpgradeMode) FilterPath(filterpaths ...string) *SetUpgradeMode { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SetUpgradeMode) Human(human bool) *SetUpgradeMode { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *SetUpgradeMode) Pretty(pretty bool) *SetUpgradeMode { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/startdatafeed/request.go b/typedapi/ml/startdatafeed/request.go index e66e529f39..e48d1f4da0 100644 --- a/typedapi/ml/startdatafeed/request.go +++ b/typedapi/ml/startdatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package startdatafeed @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package startdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/start_datafeed/MlStartDatafeedRequest.ts#L24-L91 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/start_datafeed/MlStartDatafeedRequest.ts#L24-L91 type Request struct { // End Refer to the description for the `end` query parameter. @@ -46,6 +46,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/startdatafeed/response.go b/typedapi/ml/startdatafeed/response.go index 5b6125777d..f134484061 100644 --- a/typedapi/ml/startdatafeed/response.go +++ b/typedapi/ml/startdatafeed/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package startdatafeed @@ -31,7 +31,7 @@ import ( // Response holds the response body struct for the package startdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/start_datafeed/MlStartDatafeedResponse.ts#L22-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/start_datafeed/MlStartDatafeedResponse.ts#L22-L34 type Response struct { // Node The ID of the node that the job was started on. In serverless this will be @@ -81,7 +81,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "started": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/startdatafeed/start_datafeed.go b/typedapi/ml/startdatafeed/start_datafeed.go index 2ef59577dd..d2ac6b0f07 100644 --- a/typedapi/ml/startdatafeed/start_datafeed.go +++ b/typedapi/ml/startdatafeed/start_datafeed.go @@ -16,9 +16,28 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Starts one or more datafeeds. +// +// A datafeed must be started in order to retrieve data from Elasticsearch. A +// datafeed can be started and stopped +// multiple times throughout its lifecycle. +// +// Before you can start a datafeed, the anomaly detection job must be open. +// Otherwise, an error occurs. +// +// If you restart a stopped datafeed, it continues processing input data from +// the next millisecond after it was stopped. +// If new data was indexed for that exact millisecond between stopping and +// starting, it will be ignored. +// +// When Elasticsearch security features are enabled, your datafeed remembers +// which roles the last user to create or +// update it had at the time of creation or update and runs the query using +// those same roles. If you provided secondary +// authorization headers when you created or updated the datafeed, those +// credentials are used instead. package startdatafeed import ( @@ -30,6 +49,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -82,6 +102,25 @@ func NewStartDatafeedFunc(tp elastictransport.Interface) NewStartDatafeed { // Starts one or more datafeeds. // +// A datafeed must be started in order to retrieve data from Elasticsearch. A +// datafeed can be started and stopped +// multiple times throughout its lifecycle. +// +// Before you can start a datafeed, the anomaly detection job must be open. +// Otherwise, an error occurs. +// +// If you restart a stopped datafeed, it continues processing input data from +// the next millisecond after it was stopped. +// If new data was indexed for that exact millisecond between stopping and +// starting, it will be ignored. +// +// When Elasticsearch security features are enabled, your datafeed remembers +// which roles the last user to create or +// update it had at the time of creation or update and runs the query using +// those same roles. 
If you provided secondary +// authorization headers when you created or updated the datafeed, those +// credentials are used instead. +// // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-start-datafeed.html func New(tp elastictransport.Interface) *StartDatafeed { r := &StartDatafeed{ @@ -320,6 +359,50 @@ func (r *StartDatafeed) _datafeedid(datafeedid string) *StartDatafeed { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *StartDatafeed) ErrorTrace(errortrace bool) *StartDatafeed { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *StartDatafeed) FilterPath(filterpaths ...string) *StartDatafeed { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *StartDatafeed) Human(human bool) *StartDatafeed { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *StartDatafeed) Pretty(pretty bool) *StartDatafeed { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // End Refer to the description for the `end` query parameter. // API name: end func (r *StartDatafeed) End(datetime types.DateTime) *StartDatafeed { diff --git a/typedapi/ml/startdataframeanalytics/response.go b/typedapi/ml/startdataframeanalytics/response.go index ab134f0374..f0a1d22c0d 100644 --- a/typedapi/ml/startdataframeanalytics/response.go +++ b/typedapi/ml/startdataframeanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package startdataframeanalytics // Response holds the response body struct for the package startdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/start_data_frame_analytics/MlStartDataFrameAnalyticsResponse.ts#L22-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/start_data_frame_analytics/MlStartDataFrameAnalyticsResponse.ts#L22-L34 type Response struct { Acknowledged bool `json:"acknowledged"` // Node The ID of the node that the job was started on. 
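The new filter_path helper pairs naturally with the start_datafeed response above, which only carries node and started. A sketch under the usual typed-client assumptions (ML namespace, Do(ctx)):

```go
package mlexamples

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// startDatafeed opens the feed and trims the response to the two fields the
// generated Response struct exposes.
func startDatafeed(ctx context.Context, es *elasticsearch.TypedClient, datafeedID string) error {
	res, err := es.ML.StartDatafeed(datafeedID).
		FilterPath("node", "started").
		Human(true).
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("datafeed %s: started=%v node=%v\n", datafeedID, res.Started, res.Node)
	return nil
}
```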
If the job is allowed to open diff --git a/typedapi/ml/startdataframeanalytics/start_data_frame_analytics.go b/typedapi/ml/startdataframeanalytics/start_data_frame_analytics.go index 6bbd6a38c1..dc7a871c55 100644 --- a/typedapi/ml/startdataframeanalytics/start_data_frame_analytics.go +++ b/typedapi/ml/startdataframeanalytics/start_data_frame_analytics.go @@ -16,9 +16,20 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Starts a data frame analytics job. +// A data frame analytics job can be started and stopped multiple times +// throughout its lifecycle. +// If the destination index does not exist, it is created automatically the +// first time you start the data frame analytics job. The +// `index.number_of_shards` and `index.number_of_replicas` settings for the +// destination index are copied from the source index. If there are multiple +// source indices, the destination index copies the highest setting values. The +// mappings for the destination index are also copied from the source indices. +// If there are any mapping conflicts, the job fails to start. +// If the destination index exists, it is used as is. You can therefore set up +// the destination index in advance with custom settings and mappings. package startdataframeanalytics import ( @@ -27,9 +38,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,6 +88,17 @@ func NewStartDataFrameAnalyticsFunc(tp elastictransport.Interface) NewStartDataF } // Starts a data frame analytics job. +// A data frame analytics job can be started and stopped multiple times +// throughout its lifecycle. +// If the destination index does not exist, it is created automatically the +// first time you start the data frame analytics job. The +// `index.number_of_shards` and `index.number_of_replicas` settings for the +// destination index are copied from the source index. If there are multiple +// source indices, the destination index copies the highest setting values. The +// mappings for the destination index are also copied from the source indices. +// If there are any mapping conflicts, the job fails to start. +// If the destination index exists, it is used as is. You can therefore set up +// the destination index in advance with custom settings and mappings. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/start-dfanalytics.html func New(tp elastictransport.Interface) *StartDataFrameAnalytics { @@ -272,7 +294,7 @@ func (r StartDataFrameAnalytics) IsSuccess(providedCtx context.Context) (bool, e if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -319,3 +341,47 @@ func (r *StartDataFrameAnalytics) Timeout(duration string) *StartDataFrameAnalyt return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
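A sketch of starting a data frame analytics job; since the destination index is created automatically on first start, no separate setup call is sketched. Timeout and IsSuccess are visible in this file.

```go
package mlexamples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

// startAnalytics starts a data frame analytics job and reports whether the
// call was acknowledged.
func startAnalytics(ctx context.Context, es *elasticsearch.TypedClient, jobID string) (bool, error) {
	return es.ML.StartDataFrameAnalytics(jobID).
		Timeout("2m"). // how long to wait for the job to reach the started state
		IsSuccess(ctx)
}
```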
+// API name: error_trace +func (r *StartDataFrameAnalytics) ErrorTrace(errortrace bool) *StartDataFrameAnalytics { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *StartDataFrameAnalytics) FilterPath(filterpaths ...string) *StartDataFrameAnalytics { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *StartDataFrameAnalytics) Human(human bool) *StartDataFrameAnalytics { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *StartDataFrameAnalytics) Pretty(pretty bool) *StartDataFrameAnalytics { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/starttrainedmodeldeployment/response.go b/typedapi/ml/starttrainedmodeldeployment/response.go index 0e31922680..1b0652a1a2 100644 --- a/typedapi/ml/starttrainedmodeldeployment/response.go +++ b/typedapi/ml/starttrainedmodeldeployment/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package starttrainedmodeldeployment @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package starttrainedmodeldeployment // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/start_trained_model_deployment/MlStartTrainedModelDeploymentResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/start_trained_model_deployment/MlStartTrainedModelDeploymentResponse.ts#L22-L26 type Response struct { Assignment types.TrainedModelAssignment `json:"assignment"` } diff --git a/typedapi/ml/starttrainedmodeldeployment/start_trained_model_deployment.go b/typedapi/ml/starttrainedmodeldeployment/start_trained_model_deployment.go index f7cccfc972..cbc4400431 100644 --- a/typedapi/ml/starttrainedmodeldeployment/start_trained_model_deployment.go +++ b/typedapi/ml/starttrainedmodeldeployment/start_trained_model_deployment.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Start a trained model deployment. +// Starts a trained model deployment, which allocates the model to every machine +// learning node. 
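A sketch of starting a deployment and waiting for allocation. WaitFor and its deploymentallocationstate parameter type appear elsewhere in this file; the Started constant and the enum import path are assumed from the usual typedapi layout.

```go
package mlexamples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/deploymentallocationstate"
)

// deployModel starts a deployment and blocks until it reaches the requested
// allocation state.
func deployModel(ctx context.Context, es *elasticsearch.TypedClient, modelID string) error {
	_, err := es.ML.StartTrainedModelDeployment(modelID).
		WaitFor(deploymentallocationstate.Started). // enum constant assumed from the usual generated naming
		Do(ctx)
	return err
}
```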
package starttrainedmodeldeployment import ( @@ -27,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -79,7 +79,8 @@ func NewStartTrainedModelDeploymentFunc(tp elastictransport.Interface) NewStartT } } -// Start a trained model deployment. +// Starts a trained model deployment, which allocates the model to every machine +// learning node. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/start-trained-model-deployment.html func New(tp elastictransport.Interface) *StartTrainedModelDeployment { @@ -275,7 +276,7 @@ func (r StartTrainedModelDeployment) IsSuccess(providedCtx context.Context) (boo if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -397,3 +398,47 @@ func (r *StartTrainedModelDeployment) WaitFor(waitfor deploymentallocationstate. return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *StartTrainedModelDeployment) ErrorTrace(errortrace bool) *StartTrainedModelDeployment { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *StartTrainedModelDeployment) FilterPath(filterpaths ...string) *StartTrainedModelDeployment { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *StartTrainedModelDeployment) Human(human bool) *StartTrainedModelDeployment { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *StartTrainedModelDeployment) Pretty(pretty bool) *StartTrainedModelDeployment { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/stopdatafeed/request.go b/typedapi/ml/stopdatafeed/request.go index 2ab7894d41..413156a27b 100644 --- a/typedapi/ml/stopdatafeed/request.go +++ b/typedapi/ml/stopdatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stopdatafeed @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package stopdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/stop_datafeed/MlStopDatafeedRequest.ts#L24-L78 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/stop_datafeed/MlStopDatafeedRequest.ts#L24-L78 type Request struct { // AllowNoMatch Refer to the description for the `allow_no_match` query parameter. @@ -47,6 +47,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -77,7 +78,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "allow_no_match": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +92,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "force": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/stopdatafeed/response.go b/typedapi/ml/stopdatafeed/response.go index 63354a42c5..c5cb4743d9 100644 --- a/typedapi/ml/stopdatafeed/response.go +++ b/typedapi/ml/stopdatafeed/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stopdatafeed // Response holds the response body struct for the package stopdatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/stop_datafeed/MlStopDatafeedResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/stop_datafeed/MlStopDatafeedResponse.ts#L20-L22 type Response struct { Stopped bool `json:"stopped"` } diff --git a/typedapi/ml/stopdatafeed/stop_datafeed.go b/typedapi/ml/stopdatafeed/stop_datafeed.go index 87a0fb5b88..d53e52ae56 100644 --- a/typedapi/ml/stopdatafeed/stop_datafeed.go +++ b/typedapi/ml/stopdatafeed/stop_datafeed.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Stops one or more datafeeds. +// A datafeed that is stopped ceases to retrieve data from Elasticsearch. A +// datafeed can be started and stopped +// multiple times throughout its lifecycle. package stopdatafeed import ( @@ -30,6 +33,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -81,6 +85,9 @@ func NewStopDatafeedFunc(tp elastictransport.Interface) NewStopDatafeed { } // Stops one or more datafeeds. +// A datafeed that is stopped ceases to retrieve data from Elasticsearch. A +// datafeed can be started and stopped +// multiple times throughout its lifecycle. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-stop-datafeed.html func New(tp elastictransport.Interface) *StopDatafeed { @@ -320,6 +327,50 @@ func (r *StopDatafeed) _datafeedid(datafeedid string) *StopDatafeed { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *StopDatafeed) ErrorTrace(errortrace bool) *StopDatafeed { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *StopDatafeed) FilterPath(filterpaths ...string) *StopDatafeed { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *StopDatafeed) Human(human bool) *StopDatafeed { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *StopDatafeed) Pretty(pretty bool) *StopDatafeed { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AllowNoMatch Refer to the description for the `allow_no_match` query parameter. // API name: allow_no_match func (r *StopDatafeed) AllowNoMatch(allownomatch bool) *StopDatafeed { diff --git a/typedapi/ml/stopdataframeanalytics/response.go b/typedapi/ml/stopdataframeanalytics/response.go index a0fd142a77..8708759781 100644 --- a/typedapi/ml/stopdataframeanalytics/response.go +++ b/typedapi/ml/stopdataframeanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stopdataframeanalytics // Response holds the response body struct for the package stopdataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/stop_data_frame_analytics/MlStopDataFrameAnalyticsResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/stop_data_frame_analytics/MlStopDataFrameAnalyticsResponse.ts#L20-L22 type Response struct { Stopped bool `json:"stopped"` } diff --git a/typedapi/ml/stopdataframeanalytics/stop_data_frame_analytics.go b/typedapi/ml/stopdataframeanalytics/stop_data_frame_analytics.go index 502a2f92a0..452b135004 100644 --- a/typedapi/ml/stopdataframeanalytics/stop_data_frame_analytics.go +++ b/typedapi/ml/stopdataframeanalytics/stop_data_frame_analytics.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Stops one or more data frame analytics jobs. +// A data frame analytics job can be started and stopped multiple times +// throughout its lifecycle. package stopdataframeanalytics import ( @@ -27,7 +29,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -78,6 +79,8 @@ func NewStopDataFrameAnalyticsFunc(tp elastictransport.Interface) NewStopDataFra } // Stops one or more data frame analytics jobs. +// A data frame analytics job can be started and stopped multiple times +// throughout its lifecycle. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-dfanalytics.html func New(tp elastictransport.Interface) *StopDataFrameAnalytics { @@ -273,7 +276,7 @@ func (r StopDataFrameAnalytics) IsSuccess(providedCtx context.Context) (bool, er if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -346,3 +349,47 @@ func (r *StopDataFrameAnalytics) Timeout(duration string) *StopDataFrameAnalytic return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *StopDataFrameAnalytics) ErrorTrace(errortrace bool) *StopDataFrameAnalytics { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *StopDataFrameAnalytics) FilterPath(filterpaths ...string) *StopDataFrameAnalytics { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *StopDataFrameAnalytics) Human(human bool) *StopDataFrameAnalytics { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *StopDataFrameAnalytics) Pretty(pretty bool) *StopDataFrameAnalytics { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/stoptrainedmodeldeployment/response.go b/typedapi/ml/stoptrainedmodeldeployment/response.go index 74f8796db8..4765d937a3 100644 --- a/typedapi/ml/stoptrainedmodeldeployment/response.go +++ b/typedapi/ml/stoptrainedmodeldeployment/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stoptrainedmodeldeployment // Response holds the response body struct for the package stoptrainedmodeldeployment // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/stop_trained_model_deployment/MlStopTrainedModelDeploymentResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/stop_trained_model_deployment/MlStopTrainedModelDeploymentResponse.ts#L20-L22 type Response struct { Stopped bool `json:"stopped"` } diff --git a/typedapi/ml/stoptrainedmodeldeployment/stop_trained_model_deployment.go b/typedapi/ml/stoptrainedmodeldeployment/stop_trained_model_deployment.go index 101b5abc76..9120e0e831 100644 --- a/typedapi/ml/stoptrainedmodeldeployment/stop_trained_model_deployment.go +++ b/typedapi/ml/stoptrainedmodeldeployment/stop_trained_model_deployment.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Stop a trained model deployment. +// Stops a trained model deployment. package stoptrainedmodeldeployment import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -77,7 +76,7 @@ func NewStopTrainedModelDeploymentFunc(tp elastictransport.Interface) NewStopTra } } -// Stop a trained model deployment. +// Stops a trained model deployment. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/stop-trained-model-deployment.html func New(tp elastictransport.Interface) *StopTrainedModelDeployment { @@ -273,7 +272,7 @@ func (r StopTrainedModelDeployment) IsSuccess(providedCtx context.Context) (bool if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -335,3 +334,47 @@ func (r *StopTrainedModelDeployment) Force(force bool) *StopTrainedModelDeployme return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *StopTrainedModelDeployment) ErrorTrace(errortrace bool) *StopTrainedModelDeployment { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *StopTrainedModelDeployment) FilterPath(filterpaths ...string) *StopTrainedModelDeployment { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
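The StopTrainedModelDeployment hunk above also moves IsSuccess from ioutil.Discard to io.Discard (same behaviour; the io/ioutil helpers were superseded by io and os equivalents in Go 1.16) and shows the Force body setter as context. A minimal sketch of the success-only calling style, reusing whatever elastictransport.Interface you already have (for example the typed client from the earlier sketch); the model id is supplied by the caller.

// Imports: context, elastictransport and typedapi/ml/stoptrainedmodeldeployment.

// stopDeployment force-stops a trained model deployment and reports only
// whether the call succeeded; IsSuccess drains the response body into
// io.Discard, as shown in the hunk above.
func stopDeployment(ctx context.Context, tp elastictransport.Interface, modelID string) (bool, error) {
    return stoptrainedmodeldeployment.
        NewStopTrainedModelDeploymentFunc(tp)(modelID).
        Force(true).
        IsSuccess(ctx)
}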
+// API name: human +func (r *StopTrainedModelDeployment) Human(human bool) *StopTrainedModelDeployment { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *StopTrainedModelDeployment) Pretty(pretty bool) *StopTrainedModelDeployment { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/updatedatafeed/request.go b/typedapi/ml/updatedatafeed/request.go index 12d36b11cd..bf5b674988 100644 --- a/typedapi/ml/updatedatafeed/request.go +++ b/typedapi/ml/updatedatafeed/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatedatafeed @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package updatedatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/update_datafeed/MlUpdateDatafeedRequest.ts#L31-L162 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/update_datafeed/MlUpdateDatafeedRequest.ts#L31-L162 type Request struct { // Aggregations If set, the datafeed performs aggregation searches. Support for aggregations @@ -123,6 +123,7 @@ func NewRequest() *Request { Aggregations: make(map[string]types.Aggregations, 0), ScriptFields: make(map[string]types.ScriptField, 0), } + return r } @@ -192,7 +193,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "max_empty_searches": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -231,7 +232,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "scroll_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/updatedatafeed/response.go b/typedapi/ml/updatedatafeed/response.go index 26821a95b8..2c0b943ace 100644 --- a/typedapi/ml/updatedatafeed/response.go +++ b/typedapi/ml/updatedatafeed/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatedatafeed @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatedatafeed // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/update_datafeed/MlUpdateDatafeedResponse.ts#L31-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/update_datafeed/MlUpdateDatafeedResponse.ts#L31-L49 type Response struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Authorization *types.DatafeedAuthorization `json:"authorization,omitempty"` diff --git a/typedapi/ml/updatedatafeed/update_datafeed.go b/typedapi/ml/updatedatafeed/update_datafeed.go index 3c09b608a0..dc8bb40864 100644 --- a/typedapi/ml/updatedatafeed/update_datafeed.go +++ b/typedapi/ml/updatedatafeed/update_datafeed.go @@ -16,9 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Updates certain properties of a datafeed. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Updates the properties of a datafeed. +// You must stop and start the datafeed for the changes to be applied. +// When Elasticsearch security features are enabled, your datafeed remembers +// which roles the user who updated it had at +// the time of the update and runs the query using those same roles. If you +// provide secondary authorization headers, +// those credentials are used instead. package updatedatafeed import ( @@ -82,7 +88,13 @@ func NewUpdateDatafeedFunc(tp elastictransport.Interface) NewUpdateDatafeed { } } -// Updates certain properties of a datafeed. +// Updates the properties of a datafeed. +// You must stop and start the datafeed for the changes to be applied. +// When Elasticsearch security features are enabled, your datafeed remembers +// which roles the user who updated it had at +// the time of the update and runs the query using those same roles. If you +// provide secondary authorization headers, +// those credentials are used instead. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-datafeed.html func New(tp elastictransport.Interface) *UpdateDatafeed { @@ -371,6 +383,50 @@ func (r *UpdateDatafeed) IgnoreUnavailable(ignoreunavailable bool) *UpdateDatafe return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpdateDatafeed) ErrorTrace(errortrace bool) *UpdateDatafeed { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *UpdateDatafeed) FilterPath(filterpaths ...string) *UpdateDatafeed { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
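As the updated UpdateDatafeed description above notes, datafeed changes only take effect across a stop and start cycle. The sketch below runs that sequence with the builders from this diff. It assumes UpdateDatafeed exposes the same Raw(io.Reader) helper as the other body-carrying endpoints in this change, and scroll_size is just one of the body fields the request accepts (see its UnmarshalJSON above).

// Imports: context, strings, elastictransport, typedapi/ml/stopdatafeed and
// typedapi/ml/updatedatafeed.

// updateScrollSize stops a datafeed, applies a body update, and leaves it to
// the caller to start the datafeed again so the new setting takes effect.
func updateScrollSize(ctx context.Context, tp elastictransport.Interface, datafeedID string) error {
    // AllowNoMatch(true) keeps the stop call from failing when the id or
    // wildcard expression matches no datafeeds.
    if _, err := stopdatafeed.NewStopDatafeedFunc(tp)(datafeedID).
        AllowNoMatch(true).
        Do(ctx); err != nil {
        return err
    }

    // Raw sends the JSON body as-is instead of populating the typed Request;
    // scroll_size is one of the fields the update request accepts.
    _, err := updatedatafeed.NewUpdateDatafeedFunc(tp)(datafeedID).
        Raw(strings.NewReader(`{"scroll_size": 1000}`)).
        Do(ctx)
    return err
}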
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *UpdateDatafeed) Human(human bool) *UpdateDatafeed { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpdateDatafeed) Pretty(pretty bool) *UpdateDatafeed { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Aggregations If set, the datafeed performs aggregation searches. Support for aggregations // is limited and should be used only // with low cardinality data. diff --git a/typedapi/ml/updatedataframeanalytics/request.go b/typedapi/ml/updatedataframeanalytics/request.go index ba8bc62052..df7ced3fd4 100644 --- a/typedapi/ml/updatedataframeanalytics/request.go +++ b/typedapi/ml/updatedataframeanalytics/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatedataframeanalytics @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package updatedataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/update_data_frame_analytics/MlUpdateDataFrameAnalyticsRequest.ts#L24-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/update_data_frame_analytics/MlUpdateDataFrameAnalyticsRequest.ts#L24-L72 type Request struct { // AllowLazyStart Specifies whether this job can start when there is insufficient machine @@ -51,6 +51,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/updatedataframeanalytics/response.go b/typedapi/ml/updatedataframeanalytics/response.go index 1a9edc223c..90d7182547 100644 --- a/typedapi/ml/updatedataframeanalytics/response.go +++ b/typedapi/ml/updatedataframeanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatedataframeanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatedataframeanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/update_data_frame_analytics/MlUpdateDataFrameAnalyticsResponse.ts#L30-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/update_data_frame_analytics/MlUpdateDataFrameAnalyticsResponse.ts#L30-L45 type Response struct { AllowLazyStart bool `json:"allow_lazy_start"` Analysis types.DataframeAnalysisContainer `json:"analysis"` diff --git a/typedapi/ml/updatedataframeanalytics/update_data_frame_analytics.go b/typedapi/ml/updatedataframeanalytics/update_data_frame_analytics.go index 64ae0945c7..b342ee1d75 100644 --- a/typedapi/ml/updatedataframeanalytics/update_data_frame_analytics.go +++ b/typedapi/ml/updatedataframeanalytics/update_data_frame_analytics.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Updates certain properties of a data frame analytics job. +// Updates an existing data frame analytics job. package updatedataframeanalytics import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +81,7 @@ func NewUpdateDataFrameAnalyticsFunc(tp elastictransport.Interface) NewUpdateDat } } -// Updates certain properties of a data frame analytics job. +// Updates an existing data frame analytics job. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/update-dfanalytics.html func New(tp elastictransport.Interface) *UpdateDataFrameAnalytics { @@ -320,6 +321,50 @@ func (r *UpdateDataFrameAnalytics) _id(id string) *UpdateDataFrameAnalytics { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpdateDataFrameAnalytics) ErrorTrace(errortrace bool) *UpdateDataFrameAnalytics { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *UpdateDataFrameAnalytics) FilterPath(filterpaths ...string) *UpdateDataFrameAnalytics { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *UpdateDataFrameAnalytics) Human(human bool) *UpdateDataFrameAnalytics { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpdateDataFrameAnalytics) Pretty(pretty bool) *UpdateDataFrameAnalytics { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AllowLazyStart Specifies whether this job can start when there is insufficient machine // learning node capacity for it to be immediately assigned to a node. // API name: allow_lazy_start diff --git a/typedapi/ml/updatefilter/request.go b/typedapi/ml/updatefilter/request.go index b534bc7153..2be189299a 100644 --- a/typedapi/ml/updatefilter/request.go +++ b/typedapi/ml/updatefilter/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatefilter @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package updatefilter // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/update_filter/MlUpdateFilterRequest.ts#L23-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/update_filter/MlUpdateFilterRequest.ts#L23-L51 type Request struct { // AddItems The items to add to the filter. @@ -41,6 +41,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/updatefilter/response.go b/typedapi/ml/updatefilter/response.go index c441501093..5fa6a86925 100644 --- a/typedapi/ml/updatefilter/response.go +++ b/typedapi/ml/updatefilter/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatefilter // Response holds the response body struct for the package updatefilter // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/update_filter/MlUpdateFilterResponse.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/update_filter/MlUpdateFilterResponse.ts#L22-L28 type Response struct { Description string `json:"description"` FilterId string `json:"filter_id"` diff --git a/typedapi/ml/updatefilter/update_filter.go b/typedapi/ml/updatefilter/update_filter.go index c0a2424a73..7fe7442301 100644 --- a/typedapi/ml/updatefilter/update_filter.go +++ b/typedapi/ml/updatefilter/update_filter.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Updates the description of a filter, adds items, or removes items. 
+// Updates the description of a filter, adds items, or removes items from the +// list. package updatefilter import ( @@ -30,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +82,8 @@ func NewUpdateFilterFunc(tp elastictransport.Interface) NewUpdateFilter { } } -// Updates the description of a filter, adds items, or removes items. +// Updates the description of a filter, adds items, or removes items from the +// list. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-update-filter.html func New(tp elastictransport.Interface) *UpdateFilter { @@ -316,6 +319,50 @@ func (r *UpdateFilter) _filterid(filterid string) *UpdateFilter { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpdateFilter) ErrorTrace(errortrace bool) *UpdateFilter { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *UpdateFilter) FilterPath(filterpaths ...string) *UpdateFilter { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *UpdateFilter) Human(human bool) *UpdateFilter { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpdateFilter) Pretty(pretty bool) *UpdateFilter { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AddItems The items to add to the filter. // API name: add_items func (r *UpdateFilter) AddItems(additems ...string) *UpdateFilter { diff --git a/typedapi/ml/updatejob/request.go b/typedapi/ml/updatejob/request.go index ee6e6b590e..00fa464069 100644 --- a/typedapi/ml/updatejob/request.go +++ b/typedapi/ml/updatejob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatejob @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package updatejob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/update_job/MlUpdateJobRequest.ts#L33-L138 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/update_job/MlUpdateJobRequest.ts#L33-L138 type Request struct { // AllowLazyOpen Advanced configuration option. 
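The variadic AddItems setter above maps to the add_items body field of the update filter API. A short sketch, again over any elastictransport.Interface; the filter id and items come from the caller.

// Imports: context, elastictransport and typedapi/ml/updatefilter.

// addToFilter appends items to an existing ML filter via the add_items body
// field.
func addToFilter(ctx context.Context, tp elastictransport.Interface, filterID string, items ...string) error {
    _, err := updatefilter.NewUpdateFilterFunc(tp)(filterID).
        AddItems(items...).
        Do(ctx)
    return err
}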
Specifies whether this job can open when @@ -103,6 +103,7 @@ func NewRequest() *Request { r := &Request{ CustomSettings: make(map[string]json.RawMessage, 0), } + return r } @@ -133,7 +134,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "allow_lazy_open": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -170,7 +171,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "daily_model_snapshot_retention_after_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -217,7 +218,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "model_snapshot_retention_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -237,7 +238,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "renormalization_window_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -252,7 +253,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "results_retention_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/updatejob/response.go b/typedapi/ml/updatejob/response.go index b357b04fc1..cf0737e656 100644 --- a/typedapi/ml/updatejob/response.go +++ b/typedapi/ml/updatejob/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatejob @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatejob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/update_job/MlUpdateJobResponse.ts#L29-L53 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/update_job/MlUpdateJobResponse.ts#L29-L53 type Response struct { AllowLazyOpen bool `json:"allow_lazy_open"` AnalysisConfig types.AnalysisConfigRead `json:"analysis_config"` diff --git a/typedapi/ml/updatejob/update_job.go b/typedapi/ml/updatejob/update_job.go index 6272fef320..211df9ca08 100644 --- a/typedapi/ml/updatejob/update_job.go +++ b/typedapi/ml/updatejob/update_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Updates certain properties of an anomaly detection job. package updatejob @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -316,6 +317,50 @@ func (r *UpdateJob) _jobid(jobid string) *UpdateJob { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpdateJob) ErrorTrace(errortrace bool) *UpdateJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *UpdateJob) FilterPath(filterpaths ...string) *UpdateJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *UpdateJob) Human(human bool) *UpdateJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpdateJob) Pretty(pretty bool) *UpdateJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AllowLazyOpen Advanced configuration option. Specifies whether this job can open when // there is insufficient machine learning node capacity for it to be // immediately assigned to a node. If `false` and a machine learning node diff --git a/typedapi/ml/updatemodelsnapshot/request.go b/typedapi/ml/updatemodelsnapshot/request.go index 3a0a0b5f84..b203dd9cc7 100644 --- a/typedapi/ml/updatemodelsnapshot/request.go +++ b/typedapi/ml/updatemodelsnapshot/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatemodelsnapshot @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package updatemodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/update_model_snapshot/MlUpdateModelSnapshotRequest.ts#L23-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/update_model_snapshot/MlUpdateModelSnapshotRequest.ts#L23-L54 type Request struct { // Description A description of the model snapshot. @@ -41,6 +41,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/ml/updatemodelsnapshot/response.go b/typedapi/ml/updatemodelsnapshot/response.go index d929e8cc21..d88448ffd7 100644 --- a/typedapi/ml/updatemodelsnapshot/response.go +++ b/typedapi/ml/updatemodelsnapshot/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatemodelsnapshot @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatemodelsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/update_model_snapshot/MlUpdateModelSnapshotResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/update_model_snapshot/MlUpdateModelSnapshotResponse.ts#L22-L27 type Response struct { Acknowledged bool `json:"acknowledged"` Model types.ModelSnapshot `json:"model"` diff --git a/typedapi/ml/updatemodelsnapshot/update_model_snapshot.go b/typedapi/ml/updatemodelsnapshot/update_model_snapshot.go index 7223090bbb..d917a8cedf 100644 --- a/typedapi/ml/updatemodelsnapshot/update_model_snapshot.go +++ b/typedapi/ml/updatemodelsnapshot/update_model_snapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Updates certain properties of a snapshot. package updatemodelsnapshot @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -338,6 +339,50 @@ func (r *UpdateModelSnapshot) _snapshotid(snapshotid string) *UpdateModelSnapsho return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpdateModelSnapshot) ErrorTrace(errortrace bool) *UpdateModelSnapshot { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *UpdateModelSnapshot) FilterPath(filterpaths ...string) *UpdateModelSnapshot { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *UpdateModelSnapshot) Human(human bool) *UpdateModelSnapshot { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpdateModelSnapshot) Pretty(pretty bool) *UpdateModelSnapshot { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Description A description of the model snapshot. 
// API name: description func (r *UpdateModelSnapshot) Description(description string) *UpdateModelSnapshot { diff --git a/typedapi/ml/updatetrainedmodeldeployment/request.go b/typedapi/ml/updatetrainedmodeldeployment/request.go new file mode 100644 index 0000000000..b40e035a3f --- /dev/null +++ b/typedapi/ml/updatetrainedmodeldeployment/request.go @@ -0,0 +1,60 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package updatetrainedmodeldeployment + +import ( + "encoding/json" + "fmt" +) + +// Request holds the request body struct for the package updatetrainedmodeldeployment +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/update_trained_model_deployment/MlUpdateTrainedModelDeploymentRequest.ts#L24-L62 +type Request struct { + + // NumberOfAllocations The number of model allocations on each node where the model is deployed. + // All allocations on a node share the same copy of the model in memory but use + // a separate set of threads to evaluate the model. + // Increasing this value generally increases the throughput. + // If this setting is greater than the number of hardware threads + // it will automatically be changed to a value less than the number of hardware + // threads. + NumberOfAllocations *int `json:"number_of_allocations,omitempty"` +} + +// NewRequest returns a Request +func NewRequest() *Request { + r := &Request{} + + return r +} + +// FromJSON allows to load an arbitrary json into the request structure +func (r *Request) FromJSON(data string) (*Request, error) { + var req Request + err := json.Unmarshal([]byte(data), &req) + + if err != nil { + return nil, fmt.Errorf("could not deserialise json into Updatetrainedmodeldeployment request: %w", err) + } + + return &req, nil +} diff --git a/typedapi/ml/updatetrainedmodeldeployment/response.go b/typedapi/ml/updatetrainedmodeldeployment/response.go new file mode 100644 index 0000000000..1907ebccfa --- /dev/null +++ b/typedapi/ml/updatetrainedmodeldeployment/response.go @@ -0,0 +1,38 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
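The new updatetrainedmodeldeployment request type above ships with the usual FromJSON helper, which is convenient when the body originates from configuration or a dev-console snippet. A minimal sketch; the allocation count is arbitrary.

// Import: typedapi/ml/updatetrainedmodeldeployment.

// deploymentUpdateFromJSON builds the typed request from a raw JSON body.
func deploymentUpdateFromJSON() (*updatetrainedmodeldeployment.Request, error) {
    return updatetrainedmodeldeployment.NewRequest().
        FromJSON(`{"number_of_allocations": 4}`)
}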
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package updatetrainedmodeldeployment + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +// Response holds the response body struct for the package updatetrainedmodeldeployment +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/update_trained_model_deployment/MlUpdateTrainedModelDeploymentResponse.ts#L22-L26 +type Response struct { + Assignment types.TrainedModelAssignment `json:"assignment"` +} + +// NewResponse returns a Response +func NewResponse() *Response { + r := &Response{} + return r +} diff --git a/typedapi/ml/updatetrainedmodeldeployment/update_trained_model_deployment.go b/typedapi/ml/updatetrainedmodeldeployment/update_trained_model_deployment.go new file mode 100644 index 0000000000..b8fb2cf323 --- /dev/null +++ b/typedapi/ml/updatetrainedmodeldeployment/update_trained_model_deployment.go @@ -0,0 +1,381 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Starts a trained model deployment, which allocates the model to every machine +// learning node. +package updatetrainedmodeldeployment + +import ( + gobytes "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +const ( + modelidMask = iota + 1 +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type UpdateTrainedModelDeployment struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + req *Request + deferred []func(request *Request) error + buf *gobytes.Buffer + + paramSet int + + modelid string + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewUpdateTrainedModelDeployment type alias for index. 
+type NewUpdateTrainedModelDeployment func(modelid string) *UpdateTrainedModelDeployment + +// NewUpdateTrainedModelDeploymentFunc returns a new instance of UpdateTrainedModelDeployment with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewUpdateTrainedModelDeploymentFunc(tp elastictransport.Interface) NewUpdateTrainedModelDeployment { + return func(modelid string) *UpdateTrainedModelDeployment { + n := New(tp) + + n._modelid(modelid) + + return n + } +} + +// Starts a trained model deployment, which allocates the model to every machine +// learning node. +// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/update-trained-model-deployment.html +func New(tp elastictransport.Interface) *UpdateTrainedModelDeployment { + r := &UpdateTrainedModelDeployment{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + + buf: gobytes.NewBuffer(nil), + + req: NewRequest(), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// Raw takes a json payload as input which is then passed to the http.Request +// If specified Raw takes precedence on Request method. +func (r *UpdateTrainedModelDeployment) Raw(raw io.Reader) *UpdateTrainedModelDeployment { + r.raw = raw + + return r +} + +// Request allows to set the request property with the appropriate payload. +func (r *UpdateTrainedModelDeployment) Request(req *Request) *UpdateTrainedModelDeployment { + r.req = req + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *UpdateTrainedModelDeployment) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + if len(r.deferred) > 0 { + for _, f := range r.deferred { + deferredErr := f(r.req) + if deferredErr != nil { + return nil, deferredErr + } + } + } + + if r.raw == nil && r.req != nil { + + data, err := json.Marshal(r.req) + + if err != nil { + return nil, fmt.Errorf("could not serialise request for UpdateTrainedModelDeployment: %w", err) + } + + r.buf.Write(data) + + } + + if r.buf.Len() > 0 { + r.raw = r.buf + } + + r.path.Scheme = "http" + + switch { + case r.paramSet == modelidMask: + path.WriteString("/") + path.WriteString("_ml") + path.WriteString("/") + path.WriteString("trained_models") + path.WriteString("/") + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordPathPart(ctx, "modelid", r.modelid) + } + path.WriteString(r.modelid) + path.WriteString("/") + path.WriteString("deployment") + path.WriteString("/") + path.WriteString("_update") + + method = http.MethodPost + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Content-Type") == "" { + if r.raw != nil { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could 
not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. +func (r UpdateTrainedModelDeployment) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "ml.update_trained_model_deployment") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "ml.update_trained_model_deployment") + if reader := instrument.RecordRequestBody(ctx, "ml.update_trained_model_deployment", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "ml.update_trained_model_deployment") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the UpdateTrainedModelDeployment query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a updatetrainedmodeldeployment.Response +func (r UpdateTrainedModelDeployment) Do(providedCtx context.Context) (*Response, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "ml.update_trained_model_deployment") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + response := NewResponse() + + res, err := r.Perform(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 299 { + err = json.NewDecoder(res.Body).Decode(response) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + return response, nil + } + + errorResponse := types.NewElasticsearchError() + err = json.NewDecoder(res.Body).Decode(errorResponse) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if errorResponse.Status == 0 { + errorResponse.Status = res.StatusCode + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, errorResponse) + } + return nil, errorResponse +} + +// Header set a key, value pair in the UpdateTrainedModelDeployment headers map. +func (r *UpdateTrainedModelDeployment) Header(key, value string) *UpdateTrainedModelDeployment { + r.headers.Set(key, value) + + return r +} + +// ModelId The unique identifier of the trained model. Currently, only PyTorch models +// are supported. 
+// API Name: modelid +func (r *UpdateTrainedModelDeployment) _modelid(modelid string) *UpdateTrainedModelDeployment { + r.paramSet |= modelidMask + r.modelid = modelid + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpdateTrainedModelDeployment) ErrorTrace(errortrace bool) *UpdateTrainedModelDeployment { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *UpdateTrainedModelDeployment) FilterPath(filterpaths ...string) *UpdateTrainedModelDeployment { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *UpdateTrainedModelDeployment) Human(human bool) *UpdateTrainedModelDeployment { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpdateTrainedModelDeployment) Pretty(pretty bool) *UpdateTrainedModelDeployment { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + +// NumberOfAllocations The number of model allocations on each node where the model is deployed. +// All allocations on a node share the same copy of the model in memory but use +// a separate set of threads to evaluate the model. +// Increasing this value generally increases the throughput. +// If this setting is greater than the number of hardware threads +// it will automatically be changed to a value less than the number of hardware +// threads. +// API name: number_of_allocations +func (r *UpdateTrainedModelDeployment) NumberOfAllocations(numberofallocations int) *UpdateTrainedModelDeployment { + r.req.NumberOfAllocations = &numberofallocations + + return r +} diff --git a/typedapi/ml/upgradejobsnapshot/response.go b/typedapi/ml/upgradejobsnapshot/response.go index a1e13e87e6..26f1aad9dc 100644 --- a/typedapi/ml/upgradejobsnapshot/response.go +++ b/typedapi/ml/upgradejobsnapshot/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package upgradejobsnapshot // Response holds the response body struct for the package upgradejobsnapshot // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/upgrade_job_snapshot/MlUpgradeJobSnapshotResponse.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/upgrade_job_snapshot/MlUpgradeJobSnapshotResponse.ts#L22-L31 type Response struct { // Completed When true, this means the task is complete. 
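// Example (sketch): calling the ml.update_trained_model_deployment endpoint shown
// above. The cluster URL, model ID ("my-model") and allocation count are
// illustrative assumptions, not part of the generated code.
package main

import (
	"context"
	"fmt"
	"net/url"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/updatetrainedmodeldeployment"
)

func main() {
	u, _ := url.Parse("http://localhost:9200") // assumed local cluster
	tp, err := elastictransport.New(elastictransport.Config{URLs: []*url.URL{u}})
	if err != nil {
		panic(err)
	}

	// Build the request with the generated fluent API and send it.
	res, err := updatetrainedmodeldeployment.NewUpdateTrainedModelDeploymentFunc(tp)("my-model").
		NumberOfAllocations(4).
		Do(context.Background())
	if err != nil {
		panic(err)
	}
	fmt.Printf("deployment updated: %+v\n", res)
}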
When false, it is still running. diff --git a/typedapi/ml/upgradejobsnapshot/upgrade_job_snapshot.go b/typedapi/ml/upgradejobsnapshot/upgrade_job_snapshot.go index 44326f69cb..2df93245ed 100644 --- a/typedapi/ml/upgradejobsnapshot/upgrade_job_snapshot.go +++ b/typedapi/ml/upgradejobsnapshot/upgrade_job_snapshot.go @@ -16,9 +16,17 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Upgrades a given job snapshot to the current major version. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Upgrades an anomaly detection model snapshot to the latest major version. +// Over time, older snapshot formats are deprecated and removed. Anomaly +// detection jobs support only snapshots that are from the current or previous +// major version. +// This API provides a means to upgrade a snapshot to the current major version. +// This aids in preparing the cluster for an upgrade to the next major version. +// Only one snapshot per anomaly detection job can be upgraded at a time and the +// upgraded snapshot cannot be the current snapshot of the anomaly detection +// job. package upgradejobsnapshot import ( @@ -27,7 +35,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -82,7 +89,15 @@ func NewUpgradeJobSnapshotFunc(tp elastictransport.Interface) NewUpgradeJobSnaps } } -// Upgrades a given job snapshot to the current major version. +// Upgrades an anomaly detection model snapshot to the latest major version. +// Over time, older snapshot formats are deprecated and removed. Anomaly +// detection jobs support only snapshots that are from the current or previous +// major version. +// This API provides a means to upgrade a snapshot to the current major version. +// This aids in preparing the cluster for an upgrade to the next major version. +// Only one snapshot per anomaly detection job can be upgraded at a time and the +// upgraded snapshot cannot be the current snapshot of the anomaly detection +// job. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-upgrade-job-model-snapshot.html func New(tp elastictransport.Interface) *UpgradeJobSnapshot { @@ -278,7 +293,7 @@ func (r UpgradeJobSnapshot) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -340,3 +355,47 @@ func (r *UpgradeJobSnapshot) Timeout(duration string) *UpgradeJobSnapshot { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpgradeJobSnapshot) ErrorTrace(errortrace bool) *UpgradeJobSnapshot { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *UpgradeJobSnapshot) FilterPath(filterpaths ...string) *UpgradeJobSnapshot { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *UpgradeJobSnapshot) Human(human bool) *UpgradeJobSnapshot { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpgradeJobSnapshot) Pretty(pretty bool) *UpgradeJobSnapshot { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ml/validate/request.go b/typedapi/ml/validate/request.go index d48755d807..138f300164 100644 --- a/typedapi/ml/validate/request.go +++ b/typedapi/ml/validate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package validate @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package validate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/validate/MlValidateJobRequest.ts#L27-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/validate/MlValidateJobRequest.ts#L27-L44 type Request struct { AnalysisConfig *types.AnalysisConfig `json:"analysis_config,omitempty"` AnalysisLimits *types.AnalysisLimits `json:"analysis_limits,omitempty"` @@ -49,6 +49,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -121,7 +122,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "model_snapshot_retention_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/ml/validate/response.go b/typedapi/ml/validate/response.go index 4151e14925..aec01518f8 100644 --- a/typedapi/ml/validate/response.go +++ b/typedapi/ml/validate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package validate // Response holds the response body struct for the package validate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/validate/MlValidateJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/validate/MlValidateJobResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/validate/validate.go b/typedapi/ml/validate/validate.go index 37f2ede4fb..71469f6e12 100644 --- a/typedapi/ml/validate/validate.go +++ b/typedapi/ml/validate/validate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
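// Example (sketch): upgrading an anomaly detection model snapshot with the
// ml.upgrade_job_snapshot endpoint described above. The job ID, snapshot ID and
// timeout are illustrative, and the builder is assumed to take both path
// parameters as strings.
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/upgradejobsnapshot"
)

func upgradeSnapshot(ctx context.Context, tp elastictransport.Interface) error {
	res, err := upgradejobsnapshot.NewUpgradeJobSnapshotFunc(tp)("my-job", "1575402236").
		Timeout("30m").
		Do(ctx)
	if err != nil {
		return err
	}
	if !res.Completed {
		// The upgrade was accepted but is still running on the assigned node.
		fmt.Println("snapshot upgrade started, not yet complete")
	}
	return nil
}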
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Validates an anomaly detection job. package validate @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -293,6 +294,50 @@ func (r *Validate) Header(key, value string) *Validate { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Validate) ErrorTrace(errortrace bool) *Validate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Validate) FilterPath(filterpaths ...string) *Validate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Validate) Human(human bool) *Validate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Validate) Pretty(pretty bool) *Validate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: analysis_config func (r *Validate) AnalysisConfig(analysisconfig *types.AnalysisConfig) *Validate { diff --git a/typedapi/ml/validatedetector/request.go b/typedapi/ml/validatedetector/request.go index ead09b3513..22aff4e9c5 100644 --- a/typedapi/ml/validatedetector/request.go +++ b/typedapi/ml/validatedetector/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package validatedetector @@ -26,5 +26,12 @@ import ( // Request holds the request body struct for the package validatedetector // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/validate_detector/MlValidateDetectorRequest.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/validate_detector/MlValidateDetectorRequest.ts#L23-L31 type Request = types.Detector + +// NewRequest returns a Request +func NewRequest() *Request { + r := types.NewDetector() + + return r +} diff --git a/typedapi/ml/validatedetector/response.go b/typedapi/ml/validatedetector/response.go index bba9a89a95..37fa6a1e43 100644 --- a/typedapi/ml/validatedetector/response.go +++ b/typedapi/ml/validatedetector/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
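// Example (sketch): validating an anomaly detection job configuration with the
// ml.validate endpoint. The empty request is a placeholder; in practice you would
// populate fields such as AnalysisConfig (from the generated Request type) before
// sending.
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/validate"
)

func validateJobConfig(ctx context.Context, tp elastictransport.Interface) error {
	req := validate.NewRequest()
	// Fill in the job configuration to validate, e.g. req.AnalysisConfig.

	res, err := validate.New(tp).Request(req).Pretty(true).Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("acknowledged:", res.Acknowledged)
	return nil
}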
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package validatedetector // Response holds the response body struct for the package validatedetector // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/validate_detector/MlValidateDetectorResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/validate_detector/MlValidateDetectorResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/ml/validatedetector/validate_detector.go b/typedapi/ml/validatedetector/validate_detector.go index 319a18edd7..37aacf0573 100644 --- a/typedapi/ml/validatedetector/validate_detector.go +++ b/typedapi/ml/validatedetector/validate_detector.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Validates an anomaly detection detector. package validatedetector @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -83,6 +84,8 @@ func New(tp elastictransport.Interface) *ValidateDetector { headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -294,6 +297,50 @@ func (r *ValidateDetector) Header(key, value string) *ValidateDetector { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ValidateDetector) ErrorTrace(errortrace bool) *ValidateDetector { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ValidateDetector) FilterPath(filterpaths ...string) *ValidateDetector { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ValidateDetector) Human(human bool) *ValidateDetector { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ValidateDetector) Pretty(pretty bool) *ValidateDetector { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // ByFieldName The field used to split the data. In particular, this property is used for // analyzing the splits with respect to their own history. 
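// Example (sketch): checking a single detector definition with the
// ml.validate_detector endpoint. The request is the generated types.Detector
// alias; which fields you set depends on the detector you want to validate.
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/ml/validatedetector"
)

func validateDetector(ctx context.Context, tp elastictransport.Interface) error {
	det := validatedetector.NewRequest() // *types.Detector, as added in the diff above
	// Populate the detector definition here (function, field names, and so on).

	res, err := validatedetector.New(tp).Request(det).Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("acknowledged:", res.Acknowledged)
	return nil
}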
It is used for // finding unusual values in the context of the split. diff --git a/typedapi/monitoring/bulk/bulk.go b/typedapi/monitoring/bulk/bulk.go index cac603d5ce..e7b66eeb33 100644 --- a/typedapi/monitoring/bulk/bulk.go +++ b/typedapi/monitoring/bulk/bulk.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Used by the monitoring features to send monitoring data. package bulk @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -344,3 +345,47 @@ func (r *Bulk) Interval(duration string) *Bulk { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Bulk) ErrorTrace(errortrace bool) *Bulk { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Bulk) FilterPath(filterpaths ...string) *Bulk { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Bulk) Human(human bool) *Bulk { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Bulk) Pretty(pretty bool) *Bulk { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/monitoring/bulk/request.go b/typedapi/monitoring/bulk/request.go index 27a64a6bc9..ffd78b1c6d 100644 --- a/typedapi/monitoring/bulk/request.go +++ b/typedapi/monitoring/bulk/request.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package bulk // Request holds the request body struct for the package bulk // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/monitoring/bulk/BulkMonitoringRequest.ts#L24-L59 -type Request = []interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/monitoring/bulk/BulkMonitoringRequest.ts#L24-L59 +type Request = []any diff --git a/typedapi/monitoring/bulk/response.go b/typedapi/monitoring/bulk/response.go index 20abd853a2..f45e2d4765 100644 --- a/typedapi/monitoring/bulk/response.go +++ b/typedapi/monitoring/bulk/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package bulk @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package bulk // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/monitoring/bulk/BulkMonitoringResponse.ts#L23-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/monitoring/bulk/BulkMonitoringResponse.ts#L23-L32 type Response struct { Error *types.ErrorCause `json:"error,omitempty"` // Errors True if there is was an error diff --git a/typedapi/nodes/clearrepositoriesmeteringarchive/clear_repositories_metering_archive.go b/typedapi/nodes/clearrepositoriesmeteringarchive/clear_repositories_metering_archive.go index 370664219f..8eedd6cb70 100644 --- a/typedapi/nodes/clearrepositoriesmeteringarchive/clear_repositories_metering_archive.go +++ b/typedapi/nodes/clearrepositoriesmeteringarchive/clear_repositories_metering_archive.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Removes the archived repositories metering information present in the -// cluster. +// You can use this API to clear the archived repositories metering information +// in the cluster. package clearrepositoriesmeteringarchive import ( @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -82,8 +82,8 @@ func NewClearRepositoriesMeteringArchiveFunc(tp elastictransport.Interface) NewC } } -// Removes the archived repositories metering information present in the -// cluster. +// You can use this API to clear the archived repositories metering information +// in the cluster. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/clear-repositories-metering-archive-api.html func New(tp elastictransport.Interface) *ClearRepositoriesMeteringArchive { @@ -275,7 +275,7 @@ func (r ClearRepositoriesMeteringArchive) IsSuccess(providedCtx context.Context) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -324,3 +324,47 @@ func (r *ClearRepositoriesMeteringArchive) _maxarchiveversion(maxarchiveversion return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ClearRepositoriesMeteringArchive) ErrorTrace(errortrace bool) *ClearRepositoriesMeteringArchive { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *ClearRepositoriesMeteringArchive) FilterPath(filterpaths ...string) *ClearRepositoriesMeteringArchive { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ClearRepositoriesMeteringArchive) Human(human bool) *ClearRepositoriesMeteringArchive { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ClearRepositoriesMeteringArchive) Pretty(pretty bool) *ClearRepositoriesMeteringArchive { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/nodes/clearrepositoriesmeteringarchive/response.go b/typedapi/nodes/clearrepositoriesmeteringarchive/response.go index 001c6d376d..5e8ec7297e 100644 --- a/typedapi/nodes/clearrepositoriesmeteringarchive/response.go +++ b/typedapi/nodes/clearrepositoriesmeteringarchive/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clearrepositoriesmeteringarchive @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearrepositoriesmeteringarchive // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/clear_repositories_metering_archive/ClearRepositoriesMeteringArchiveResponse.ts#L36-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/clear_repositories_metering_archive/ClearRepositoriesMeteringArchiveResponse.ts#L36-L38 type Response struct { // ClusterName Name of the cluster. Based on the [Cluster name diff --git a/typedapi/nodes/getrepositoriesmeteringinfo/get_repositories_metering_info.go b/typedapi/nodes/getrepositoriesmeteringinfo/get_repositories_metering_info.go index 2645f9725d..6c872a47c3 100644 --- a/typedapi/nodes/getrepositoriesmeteringinfo/get_repositories_metering_info.go +++ b/typedapi/nodes/getrepositoriesmeteringinfo/get_repositories_metering_info.go @@ -16,9 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Returns cluster repositories metering information. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// You can use the cluster repositories metering API to retrieve repositories +// metering information in a cluster. +// This API exposes monotonically non-decreasing counters and it’s expected that +// clients would durably store the +// information needed to compute aggregations over a period of time. 
+// Additionally, the information exposed by this +// API is volatile, meaning that it won’t be present after node restarts. package getrepositoriesmeteringinfo import ( @@ -27,9 +33,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +82,13 @@ func NewGetRepositoriesMeteringInfoFunc(tp elastictransport.Interface) NewGetRep } } -// Returns cluster repositories metering information. +// You can use the cluster repositories metering API to retrieve repositories +// metering information in a cluster. +// This API exposes monotonically non-decreasing counters and it’s expected that +// clients would durably store the +// information needed to compute aggregations over a period of time. +// Additionally, the information exposed by this +// API is volatile, meaning that it won’t be present after node restarts. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-repositories-metering-api.html func New(tp elastictransport.Interface) *GetRepositoriesMeteringInfo { @@ -262,7 +274,7 @@ func (r GetRepositoriesMeteringInfo) IsSuccess(providedCtx context.Context) (boo if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +312,47 @@ func (r *GetRepositoriesMeteringInfo) _nodeid(nodeid string) *GetRepositoriesMet return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetRepositoriesMeteringInfo) ErrorTrace(errortrace bool) *GetRepositoriesMeteringInfo { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetRepositoriesMeteringInfo) FilterPath(filterpaths ...string) *GetRepositoriesMeteringInfo { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetRepositoriesMeteringInfo) Human(human bool) *GetRepositoriesMeteringInfo { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetRepositoriesMeteringInfo) Pretty(pretty bool) *GetRepositoriesMeteringInfo { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/nodes/getrepositoriesmeteringinfo/response.go b/typedapi/nodes/getrepositoriesmeteringinfo/response.go index b498aa0ed7..5e60739497 100644 --- a/typedapi/nodes/getrepositoriesmeteringinfo/response.go +++ b/typedapi/nodes/getrepositoriesmeteringinfo/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
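// Example (sketch): retrieving repositories metering information for one node.
// The node ID is illustrative; the builder is assumed to take the comma-separated
// node IDs as its single path parameter.
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/nodes/getrepositoriesmeteringinfo"
)

func repositoriesMetering(ctx context.Context, tp elastictransport.Interface) error {
	res, err := getrepositoriesmeteringinfo.NewGetRepositoriesMeteringInfoFunc(tp)("node-1").
		Human(true).
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("cluster:", res.ClusterName)
	return nil
}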
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getrepositoriesmeteringinfo @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrepositoriesmeteringinfo // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/get_repositories_metering_info/GetRepositoriesMeteringInfoResponse.ts#L36-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/get_repositories_metering_info/GetRepositoriesMeteringInfoResponse.ts#L36-L38 type Response struct { // ClusterName Name of the cluster. Based on the [Cluster name diff --git a/typedapi/nodes/hotthreads/hot_threads.go b/typedapi/nodes/hotthreads/hot_threads.go index 42b80c776c..1e1ec88b29 100644 --- a/typedapi/nodes/hotthreads/hot_threads.go +++ b/typedapi/nodes/hotthreads/hot_threads.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about hot threads on each node in the cluster. +// This API yields a breakdown of the hot threads on each selected node in the +// cluster. +// The output is plain text with a breakdown of each node’s top hot threads. package hotthreads import ( @@ -27,7 +29,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -76,7 +77,9 @@ func NewHotThreadsFunc(tp elastictransport.Interface) NewHotThreads { } } -// Returns information about hot threads on each node in the cluster. +// This API yields a breakdown of the hot threads on each selected node in the +// cluster. +// The output is plain text with a breakdown of each node’s top hot threads. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-nodes-hot-threads.html func New(tp elastictransport.Interface) *HotThreads { @@ -269,7 +272,7 @@ func (r HotThreads) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -373,3 +376,47 @@ func (r *HotThreads) Sort(sort threadtype.ThreadType) *HotThreads { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *HotThreads) ErrorTrace(errortrace bool) *HotThreads { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *HotThreads) FilterPath(filterpaths ...string) *HotThreads { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. 
This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *HotThreads) Human(human bool) *HotThreads { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *HotThreads) Pretty(pretty bool) *HotThreads { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/nodes/hotthreads/response.go b/typedapi/nodes/hotthreads/response.go index 35060da875..d3bf6fc48a 100644 --- a/typedapi/nodes/hotthreads/response.go +++ b/typedapi/nodes/hotthreads/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package hotthreads @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package hotthreads // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/hot_threads/NodesHotThreadsResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/hot_threads/NodesHotThreadsResponse.ts#L22-L24 type Response struct { HotThreads []types.HotThread `json:"hot_threads"` } diff --git a/typedapi/nodes/info/info.go b/typedapi/nodes/info/info.go index e226e7da30..c54c58677f 100644 --- a/typedapi/nodes/info/info.go +++ b/typedapi/nodes/info/info.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns information about nodes in the cluster. +// Returns cluster nodes information. package info import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -78,7 +77,7 @@ func NewInfoFunc(tp elastictransport.Interface) NewInfo { } } -// Returns information about nodes in the cluster. +// Returns cluster nodes information. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-nodes-info.html func New(tp elastictransport.Interface) *Info { @@ -295,7 +294,7 @@ func (r Info) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -367,3 +366,47 @@ func (r *Info) Timeout(duration string) *Info { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Info) ErrorTrace(errortrace bool) *Info { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
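// Example (sketch): fetching the hot threads report. Because the response body is
// plain text rather than JSON, this sketch uses Perform and reads the body
// directly instead of decoding a typed response.
package examples

import (
	"context"
	"fmt"
	"io"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/nodes/hotthreads"
)

func hotThreadsReport(ctx context.Context, tp elastictransport.Interface) error {
	res, err := hotthreads.New(tp).Perform(ctx)
	if err != nil {
		return err
	}
	defer res.Body.Close()

	text, err := io.ReadAll(res.Body)
	if err != nil {
		return err
	}
	fmt.Println(string(text))
	return nil
}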
+// API name: filter_path +func (r *Info) FilterPath(filterpaths ...string) *Info { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Info) Human(human bool) *Info { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Info) Pretty(pretty bool) *Info { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/nodes/info/response.go b/typedapi/nodes/info/response.go index c36d65b66e..57979be0b8 100644 --- a/typedapi/nodes/info/response.go +++ b/typedapi/nodes/info/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/NodesInfoResponse.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/NodesInfoResponse.ts#L30-L32 type Response struct { ClusterName string `json:"cluster_name"` // NodeStats Contains statistics about the number of nodes selected by the request’s node diff --git a/typedapi/nodes/reloadsecuresettings/reload_secure_settings.go b/typedapi/nodes/reloadsecuresettings/reload_secure_settings.go index 65a10a2a9a..bb0b96a681 100644 --- a/typedapi/nodes/reloadsecuresettings/reload_secure_settings.go +++ b/typedapi/nodes/reloadsecuresettings/reload_secure_settings.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Reloads secure settings. +// Reloads the keystore on nodes in the cluster. package reloadsecuresettings import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -78,7 +79,7 @@ func NewReloadSecureSettingsFunc(tp elastictransport.Interface) NewReloadSecureS } } -// Reloads secure settings. +// Reloads the keystore on nodes in the cluster. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/secure-settings.html#reloadable-secure-settings func New(tp elastictransport.Interface) *ReloadSecureSettings { @@ -329,6 +330,50 @@ func (r *ReloadSecureSettings) Timeout(duration string) *ReloadSecureSettings { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
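// Example (sketch): fetching node information for the whole cluster and printing
// the cluster name from the typed response.
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/nodes/info"
)

func nodesInfo(ctx context.Context, tp elastictransport.Interface) error {
	res, err := info.New(tp).Human(true).Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("cluster:", res.ClusterName)
	return nil
}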
+// API name: error_trace +func (r *ReloadSecureSettings) ErrorTrace(errortrace bool) *ReloadSecureSettings { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ReloadSecureSettings) FilterPath(filterpaths ...string) *ReloadSecureSettings { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ReloadSecureSettings) Human(human bool) *ReloadSecureSettings { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ReloadSecureSettings) Pretty(pretty bool) *ReloadSecureSettings { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // SecureSettingsPassword The password for the Elasticsearch keystore. // API name: secure_settings_password func (r *ReloadSecureSettings) SecureSettingsPassword(password string) *ReloadSecureSettings { diff --git a/typedapi/nodes/reloadsecuresettings/request.go b/typedapi/nodes/reloadsecuresettings/request.go index 0cb449b234..91505415b2 100644 --- a/typedapi/nodes/reloadsecuresettings/request.go +++ b/typedapi/nodes/reloadsecuresettings/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package reloadsecuresettings @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package reloadsecuresettings // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/reload_secure_settings/ReloadSecureSettingsRequest.ts#L24-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/reload_secure_settings/ReloadSecureSettingsRequest.ts#L24-L50 type Request struct { // SecureSettingsPassword The password for the Elasticsearch keystore. @@ -37,6 +37,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/nodes/reloadsecuresettings/response.go b/typedapi/nodes/reloadsecuresettings/response.go index 6a6e0e2b01..5e76c878cc 100644 --- a/typedapi/nodes/reloadsecuresettings/response.go +++ b/typedapi/nodes/reloadsecuresettings/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
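// Example (sketch): reloading the reloadable secure settings on all nodes. The
// keystore password and timeout are illustrative; omit SecureSettingsPassword if
// the keystore is not password protected.
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/nodes/reloadsecuresettings"
)

func reloadKeystore(ctx context.Context, tp elastictransport.Interface) error {
	res, err := reloadsecuresettings.New(tp).
		SecureSettingsPassword("keystore-password").
		Timeout("30s").
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("cluster:", res.ClusterName)
	return nil
}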
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package reloadsecuresettings @@ -32,7 +32,7 @@ import ( // Response holds the response body struct for the package reloadsecuresettings // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/reload_secure_settings/ReloadSecureSettingsResponse.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/reload_secure_settings/ReloadSecureSettingsResponse.ts#L30-L32 type Response struct { ClusterName string `json:"cluster_name"` // NodeStats Contains statistics about the number of nodes selected by the request’s node diff --git a/typedapi/nodes/stats/response.go b/typedapi/nodes/stats/response.go index 1c272df280..12a5836f2d 100644 --- a/typedapi/nodes/stats/response.go +++ b/typedapi/nodes/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/stats/NodesStatsResponse.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/stats/NodesStatsResponse.ts#L30-L32 type Response struct { ClusterName *string `json:"cluster_name,omitempty"` // NodeStats Contains statistics about the number of nodes selected by the request’s node diff --git a/typedapi/nodes/stats/stats.go b/typedapi/nodes/stats/stats.go index 1713d6e8c3..69327306bf 100644 --- a/typedapi/nodes/stats/stats.go +++ b/typedapi/nodes/stats/stats.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns statistical information about nodes in the cluster. +// Returns cluster nodes statistics. package stats import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -82,7 +81,7 @@ func NewStatsFunc(tp elastictransport.Interface) NewStats { } } -// Returns statistical information about nodes in the cluster. +// Returns cluster nodes statistics. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-nodes-stats.html func New(tp elastictransport.Interface) *Stats { @@ -351,7 +350,7 @@ func (r Stats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -498,3 +497,47 @@ func (r *Stats) IncludeUnloadedSegments(includeunloadedsegments bool) *Stats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *Stats) ErrorTrace(errortrace bool) *Stats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Stats) FilterPath(filterpaths ...string) *Stats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Stats) Human(human bool) *Stats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Stats) Pretty(pretty bool) *Stats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/nodes/usage/response.go b/typedapi/nodes/usage/response.go index da0c96d9fd..204a8ce350 100644 --- a/typedapi/nodes/usage/response.go +++ b/typedapi/nodes/usage/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package usage @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package usage // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/usage/NodesUsageResponse.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/usage/NodesUsageResponse.ts#L30-L32 type Response struct { ClusterName string `json:"cluster_name"` // NodeStats Contains statistics about the number of nodes selected by the request’s node diff --git a/typedapi/nodes/usage/usage.go b/typedapi/nodes/usage/usage.go index 3ea0dbf285..d5437f3661 100644 --- a/typedapi/nodes/usage/usage.go +++ b/typedapi/nodes/usage/usage.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns low-level information about REST actions usage on nodes. +// Returns information on the usage of features. package usage import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,7 +77,7 @@ func NewUsageFunc(tp elastictransport.Interface) NewUsage { } } -// Returns low-level information about REST actions usage on nodes. +// Returns information on the usage of features. 
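// Example (sketch): collecting node statistics, including statistics for unloaded
// segments, with human-readable values in the response.
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/nodes/stats"
)

func nodesStats(ctx context.Context, tp elastictransport.Interface) error {
	res, err := stats.New(tp).
		IncludeUnloadedSegments(true).
		Human(true).
		Do(ctx)
	if err != nil {
		return err
	}
	if res.ClusterName != nil {
		fmt.Println("cluster:", *res.ClusterName)
	}
	return nil
}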
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/cluster-nodes-usage.html func New(tp elastictransport.Interface) *Usage { @@ -302,7 +302,7 @@ func (r Usage) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -360,3 +360,47 @@ func (r *Usage) Timeout(duration string) *Usage { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Usage) ErrorTrace(errortrace bool) *Usage { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Usage) FilterPath(filterpaths ...string) *Usage { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Usage) Human(human bool) *Usage { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Usage) Pretty(pretty bool) *Usage { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/profiling/flamegraph/flamegraph.go b/typedapi/profiling/flamegraph/flamegraph.go new file mode 100644 index 0000000000..4bf667d4f5 --- /dev/null +++ b/typedapi/profiling/flamegraph/flamegraph.go @@ -0,0 +1,233 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Extracts a UI-optimized structure to render flamegraphs from Universal +// Profiling. +package flamegraph + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. 
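// Example (sketch): retrieving feature usage information from the cluster nodes
// with the nodes.usage endpoint shown above. The timeout value is illustrative.
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/nodes/usage"
)

func nodesUsage(ctx context.Context, tp elastictransport.Interface) error {
	res, err := usage.New(tp).Timeout("30s").Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("cluster:", res.ClusterName)
	return nil
}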
+var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type Flamegraph struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewFlamegraph type alias for index. +type NewFlamegraph func() *Flamegraph + +// NewFlamegraphFunc returns a new instance of Flamegraph with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewFlamegraphFunc(tp elastictransport.Interface) NewFlamegraph { + return func() *Flamegraph { + n := New(tp) + + return n + } +} + +// Extracts a UI-optimized structure to render flamegraphs from Universal +// Profiling. +// +// https://www.elastic.co/guide/en/observability/current/universal-profiling.html +func New(tp elastictransport.Interface) *Flamegraph { + r := &Flamegraph{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *Flamegraph) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_profiling") + path.WriteString("/") + path.WriteString("flamegraph") + + method = http.MethodPost + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Content-Type") == "" { + if r.raw != nil { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
+func (r Flamegraph) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "profiling.flamegraph") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "profiling.flamegraph") + if reader := instrument.RecordRequestBody(ctx, "profiling.flamegraph", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "profiling.flamegraph") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the Flamegraph query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a flamegraph.Response +func (r Flamegraph) Do(ctx context.Context) (bool, error) { + return r.IsSuccess(ctx) +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r Flamegraph) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "profiling.flamegraph") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the Flamegraph query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the Flamegraph headers map. +func (r *Flamegraph) Header(key, value string) *Flamegraph { + r.headers.Set(key, value) + + return r +} diff --git a/typedapi/profiling/stacktraces/stacktraces.go b/typedapi/profiling/stacktraces/stacktraces.go new file mode 100644 index 0000000000..47ea786c72 --- /dev/null +++ b/typedapi/profiling/stacktraces/stacktraces.go @@ -0,0 +1,231 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Extracts raw stacktrace information from Universal Profiling. +package stacktraces + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type Stacktraces struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewStacktraces type alias for index. +type NewStacktraces func() *Stacktraces + +// NewStacktracesFunc returns a new instance of Stacktraces with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewStacktracesFunc(tp elastictransport.Interface) NewStacktraces { + return func() *Stacktraces { + n := New(tp) + + return n + } +} + +// Extracts raw stacktrace information from Universal Profiling. +// +// https://www.elastic.co/guide/en/observability/current/universal-profiling.html +func New(tp elastictransport.Interface) *Stacktraces { + r := &Stacktraces{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *Stacktraces) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_profiling") + path.WriteString("/") + path.WriteString("stacktraces") + + method = http.MethodPost + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Content-Type") == "" { + if r.raw != nil { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
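// Editorial note, not part of the generated file: when the raw response is
// needed (Do only returns a success boolean for this endpoint), Perform can
// be called directly, as sketched here. tp is assumed to implement
// elastictransport.Interface; the caller is responsible for closing the body.
func exampleStacktracesPerform(tp elastictransport.Interface) error {
	res, err := New(tp).Perform(context.Background())
	if err != nil {
		return err
	}
	defer res.Body.Close()

	body, err := io.ReadAll(res.Body)
	if err != nil {
		return err
	}
	fmt.Printf("stacktraces: status=%d, %d bytes\n", res.StatusCode, len(body))
	return nil
}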
+func (r Stacktraces) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "profiling.stacktraces") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "profiling.stacktraces") + if reader := instrument.RecordRequestBody(ctx, "profiling.stacktraces", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "profiling.stacktraces") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the Stacktraces query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a stacktraces.Response +func (r Stacktraces) Do(ctx context.Context) (bool, error) { + return r.IsSuccess(ctx) +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r Stacktraces) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "profiling.stacktraces") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the Stacktraces query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the Stacktraces headers map. +func (r *Stacktraces) Header(key, value string) *Stacktraces { + r.headers.Set(key, value) + + return r +} diff --git a/typedapi/profiling/status/status.go b/typedapi/profiling/status/status.go new file mode 100644 index 0000000000..5a23b68556 --- /dev/null +++ b/typedapi/profiling/status/status.go @@ -0,0 +1,225 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Returns basic information about the status of Universal Profiling. +package status + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type Status struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewStatus type alias for index. +type NewStatus func() *Status + +// NewStatusFunc returns a new instance of Status with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewStatusFunc(tp elastictransport.Interface) NewStatus { + return func() *Status { + n := New(tp) + + return n + } +} + +// Returns basic information about the status of Universal Profiling. +// +// https://www.elastic.co/guide/en/observability/current/universal-profiling.html +func New(tp elastictransport.Interface) *Status { + r := &Status{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *Status) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_profiling") + path.WriteString("/") + path.WriteString("status") + + method = http.MethodGet + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
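// Editorial note, not part of the generated file: the status endpoint is a
// body-less GET, so IsSuccess is usually all that is needed, as sketched
// below. tp is assumed to implement elastictransport.Interface.
func exampleProfilingStatus(tp elastictransport.Interface) (bool, error) {
	// True for any 2xx response, false with a nil error for a 404; any other
	// status code is reported as an error.
	return New(tp).IsSuccess(context.Background())
}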
+func (r Status) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "profiling.status") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "profiling.status") + if reader := instrument.RecordRequestBody(ctx, "profiling.status", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "profiling.status") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the Status query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a status.Response +func (r Status) Do(ctx context.Context) (bool, error) { + return r.IsSuccess(ctx) +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r Status) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "profiling.status") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the Status query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the Status headers map. +func (r *Status) Header(key, value string) *Status { + r.headers.Set(key, value) + + return r +} diff --git a/typedapi/profiling/topnfunctions/topn_functions.go b/typedapi/profiling/topnfunctions/topn_functions.go new file mode 100644 index 0000000000..4659a8f5cf --- /dev/null +++ b/typedapi/profiling/topnfunctions/topn_functions.go @@ -0,0 +1,233 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Extracts a list of topN functions from Universal Profiling. +package topnfunctions + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type TopnFunctions struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewTopnFunctions type alias for index. +type NewTopnFunctions func() *TopnFunctions + +// NewTopnFunctionsFunc returns a new instance of TopnFunctions with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewTopnFunctionsFunc(tp elastictransport.Interface) NewTopnFunctions { + return func() *TopnFunctions { + n := New(tp) + + return n + } +} + +// Extracts a list of topN functions from Universal Profiling. +// +// https://www.elastic.co/guide/en/observability/current/universal-profiling.html +func New(tp elastictransport.Interface) *TopnFunctions { + r := &TopnFunctions{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. 
+func (r *TopnFunctions) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_profiling") + path.WriteString("/") + path.WriteString("topn") + path.WriteString("/") + path.WriteString("functions") + + method = http.MethodPost + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Content-Type") == "" { + if r.raw != nil { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. +func (r TopnFunctions) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "profiling.topn_functions") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "profiling.topn_functions") + if reader := instrument.RecordRequestBody(ctx, "profiling.topn_functions", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "profiling.topn_functions") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the TopnFunctions query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a topnfunctions.Response +func (r TopnFunctions) Do(ctx context.Context) (bool, error) { + return r.IsSuccess(ctx) +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. 
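// Editorial note, not part of the generated file: HttpRequest can be used on
// its own to inspect the request that would be sent, for example in tests.
// tp is assumed to implement elastictransport.Interface.
func exampleTopnFunctionsRequest(tp elastictransport.Interface) error {
	req, err := New(tp).HttpRequest(context.Background())
	if err != nil {
		return err
	}
	// Expected to print: POST /_profiling/topn/functions
	fmt.Println(req.Method, req.URL.Path)
	return nil
}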
+func (r TopnFunctions) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "profiling.topn_functions") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the TopnFunctions query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the TopnFunctions headers map. +func (r *TopnFunctions) Header(key, value string) *TopnFunctions { + r.headers.Set(key, value) + + return r +} diff --git a/typedapi/queryrules/deleterule/delete_rule.go b/typedapi/queryrules/deleterule/delete_rule.go new file mode 100644 index 0000000000..90638db349 --- /dev/null +++ b/typedapi/queryrules/deleterule/delete_rule.go @@ -0,0 +1,365 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Deletes a query rule within a query ruleset. +package deleterule + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +const ( + rulesetidMask = iota + 1 + + ruleidMask +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type DeleteRule struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + rulesetid string + ruleid string + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewDeleteRule type alias for index. +type NewDeleteRule func(rulesetid, ruleid string) *DeleteRule + +// NewDeleteRuleFunc returns a new instance of DeleteRule with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. 
+func NewDeleteRuleFunc(tp elastictransport.Interface) NewDeleteRule { + return func(rulesetid, ruleid string) *DeleteRule { + n := New(tp) + + n._rulesetid(rulesetid) + + n._ruleid(ruleid) + + return n + } +} + +// Deletes a query rule within a query ruleset. +// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-query-rule.html +func New(tp elastictransport.Interface) *DeleteRule { + r := &DeleteRule{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *DeleteRule) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == rulesetidMask|ruleidMask: + path.WriteString("/") + path.WriteString("_query_rules") + path.WriteString("/") + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordPathPart(ctx, "rulesetid", r.rulesetid) + } + path.WriteString(r.rulesetid) + path.WriteString("/") + path.WriteString("_rule") + path.WriteString("/") + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordPathPart(ctx, "ruleid", r.ruleid) + } + path.WriteString(r.ruleid) + + method = http.MethodDelete + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
+func (r DeleteRule) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "query_rules.delete_rule") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "query_rules.delete_rule") + if reader := instrument.RecordRequestBody(ctx, "query_rules.delete_rule", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "query_rules.delete_rule") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the DeleteRule query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a deleterule.Response +func (r DeleteRule) Do(providedCtx context.Context) (*Response, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "query_rules.delete_rule") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + response := NewResponse() + + res, err := r.Perform(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 299 { + err = json.NewDecoder(res.Body).Decode(response) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + return response, nil + } + + errorResponse := types.NewElasticsearchError() + err = json.NewDecoder(res.Body).Decode(errorResponse) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if errorResponse.Status == 0 { + errorResponse.Status = res.StatusCode + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, errorResponse) + } + return nil, errorResponse +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. 
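// Editorial note, not part of the generated file: a sketch of deleting a
// single query rule. The ruleset and rule identifiers are illustrative and tp
// is assumed to implement elastictransport.Interface.
func exampleDeleteRule(tp elastictransport.Interface) error {
	res, err := NewDeleteRuleFunc(tp)("my-ruleset", "my-rule").Do(context.Background())
	if err != nil {
		return err
	}
	// On success the response body carries the usual acknowledged flag
	// (see the accompanying response.go).
	fmt.Println("acknowledged:", res.Acknowledged)
	return nil
}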
+func (r DeleteRule) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "query_rules.delete_rule") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the DeleteRule query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the DeleteRule headers map. +func (r *DeleteRule) Header(key, value string) *DeleteRule { + r.headers.Set(key, value) + + return r +} + +// RulesetId The unique identifier of the query ruleset containing the rule to delete +// API Name: rulesetid +func (r *DeleteRule) _rulesetid(rulesetid string) *DeleteRule { + r.paramSet |= rulesetidMask + r.rulesetid = rulesetid + + return r +} + +// RuleId The unique identifier of the query rule within the specified ruleset to +// delete +// API Name: ruleid +func (r *DeleteRule) _ruleid(ruleid string) *DeleteRule { + r.paramSet |= ruleidMask + r.ruleid = ruleid + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteRule) ErrorTrace(errortrace bool) *DeleteRule { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteRule) FilterPath(filterpaths ...string) *DeleteRule { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteRule) Human(human bool) *DeleteRule { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteRule) Pretty(pretty bool) *DeleteRule { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/inference/deletemodel/response.go b/typedapi/queryrules/deleterule/response.go similarity index 79% rename from typedapi/inference/deletemodel/response.go rename to typedapi/queryrules/deleterule/response.go index d95ea86dff..b8d5712186 100644 --- a/typedapi/inference/deletemodel/response.go +++ b/typedapi/queryrules/deleterule/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -package deletemodel +package deleterule -// Response holds the response body struct for the package deletemodel +// Response holds the response body struct for the package deleterule // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/delete_model/DeleteModelResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/delete_rule/QueryRuleDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/queryruleset/delete/delete.go b/typedapi/queryrules/deleteruleset/delete_ruleset.go similarity index 66% rename from typedapi/queryruleset/delete/delete.go rename to typedapi/queryrules/deleteruleset/delete_ruleset.go index 00792067af..51ff281b50 100644 --- a/typedapi/queryruleset/delete/delete.go +++ b/typedapi/queryrules/deleteruleset/delete_ruleset.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes a query ruleset. -package delete +package deleteruleset import ( "context" @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -43,7 +43,7 @@ const ( // ErrBuildPath is returned in case of missing parameters within the build of the request. var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") -type Delete struct { +type DeleteRuleset struct { transport elastictransport.Interface headers http.Header @@ -61,13 +61,13 @@ type Delete struct { instrument elastictransport.Instrumentation } -// NewDelete type alias for index. -type NewDelete func(rulesetid string) *Delete +// NewDeleteRuleset type alias for index. +type NewDeleteRuleset func(rulesetid string) *DeleteRuleset -// NewDeleteFunc returns a new instance of Delete with the provided transport. +// NewDeleteRulesetFunc returns a new instance of DeleteRuleset with the provided transport. // Used in the index of the library this allows to retrieve every apis in once place. -func NewDeleteFunc(tp elastictransport.Interface) NewDelete { - return func(rulesetid string) *Delete { +func NewDeleteRulesetFunc(tp elastictransport.Interface) NewDeleteRuleset { + return func(rulesetid string) *DeleteRuleset { n := New(tp) n._rulesetid(rulesetid) @@ -79,8 +79,8 @@ func NewDeleteFunc(tp elastictransport.Interface) NewDelete { // Deletes a query ruleset. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-query-ruleset.html -func New(tp elastictransport.Interface) *Delete { - r := &Delete{ +func New(tp elastictransport.Interface) *DeleteRuleset { + r := &DeleteRuleset{ transport: tp, values: make(url.Values), headers: make(http.Header), @@ -97,7 +97,7 @@ func New(tp elastictransport.Interface) *Delete { // HttpRequest returns the http.Request object built from the // given parameters. 
-func (r *Delete) HttpRequest(ctx context.Context) (*http.Request, error) { +func (r *DeleteRuleset) HttpRequest(ctx context.Context) (*http.Request, error) { var path strings.Builder var method string var req *http.Request @@ -147,11 +147,11 @@ func (r *Delete) HttpRequest(ctx context.Context) (*http.Request, error) { } // Perform runs the http.Request through the provided transport and returns an http.Response. -func (r Delete) Perform(providedCtx context.Context) (*http.Response, error) { +func (r DeleteRuleset) Perform(providedCtx context.Context) (*http.Response, error) { var ctx context.Context if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { if r.spanStarted == false { - ctx := instrument.Start(providedCtx, "query_ruleset.delete") + ctx := instrument.Start(providedCtx, "query_rules.delete_ruleset") defer instrument.Close(ctx) } } @@ -168,17 +168,17 @@ func (r Delete) Perform(providedCtx context.Context) (*http.Response, error) { } if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.BeforeRequest(req, "query_ruleset.delete") - if reader := instrument.RecordRequestBody(ctx, "query_ruleset.delete", r.raw); reader != nil { + instrument.BeforeRequest(req, "query_rules.delete_ruleset") + if reader := instrument.RecordRequestBody(ctx, "query_rules.delete_ruleset", r.raw); reader != nil { req.Body = reader } } res, err := r.transport.Perform(req) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "query_ruleset.delete") + instrument.AfterRequest(req, "elasticsearch", "query_rules.delete_ruleset") } if err != nil { - localErr := fmt.Errorf("an error happened during the Delete query execution: %w", err) + localErr := fmt.Errorf("an error happened during the DeleteRuleset query execution: %w", err) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, localErr) } @@ -188,12 +188,12 @@ func (r Delete) Perform(providedCtx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a delete.Response -func (r Delete) Do(providedCtx context.Context) (*Response, error) { +// Do runs the request through the transport, handle the response and returns a deleteruleset.Response +func (r DeleteRuleset) Do(providedCtx context.Context) (*Response, error) { var ctx context.Context r.spanStarted = true if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - ctx = instrument.Start(providedCtx, "query_ruleset.delete") + ctx = instrument.Start(providedCtx, "query_rules.delete_ruleset") defer instrument.Close(ctx) } if ctx == nil { @@ -244,11 +244,11 @@ func (r Delete) Do(providedCtx context.Context) (*Response, error) { // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. 
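// Editorial note, not part of the generated diff: this file is a rename of the
// old typedapi/queryruleset/delete package, so existing callers switch the
// import path and constructor name, roughly as sketched here (identifiers as
// they appear in this diff, ruleset id illustrative):
//
//	import "github.com/elastic/go-elasticsearch/v8/typedapi/queryrules/deleteruleset"
//
//	res, err := deleteruleset.NewDeleteRulesetFunc(tp)("my-ruleset").Do(ctx)
//
// The instrumentation span name changes accordingly, from
// "query_ruleset.delete" to "query_rules.delete_ruleset".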
-func (r Delete) IsSuccess(providedCtx context.Context) (bool, error) { +func (r DeleteRuleset) IsSuccess(providedCtx context.Context) (bool, error) { var ctx context.Context r.spanStarted = true if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - ctx = instrument.Start(providedCtx, "query_ruleset.delete") + ctx = instrument.Start(providedCtx, "query_rules.delete_ruleset") defer instrument.Close(ctx) } if ctx == nil { @@ -260,7 +260,7 @@ func (r Delete) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -271,7 +271,7 @@ func (r Delete) IsSuccess(providedCtx context.Context) (bool, error) { } if res.StatusCode != 404 { - err := fmt.Errorf("an error happened during the Delete query execution, status code: %d", res.StatusCode) + err := fmt.Errorf("an error happened during the DeleteRuleset query execution, status code: %d", res.StatusCode) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, err) } @@ -281,8 +281,8 @@ func (r Delete) IsSuccess(providedCtx context.Context) (bool, error) { return false, nil } -// Header set a key, value pair in the Delete headers map. -func (r *Delete) Header(key, value string) *Delete { +// Header set a key, value pair in the DeleteRuleset headers map. +func (r *DeleteRuleset) Header(key, value string) *DeleteRuleset { r.headers.Set(key, value) return r @@ -290,9 +290,53 @@ func (r *Delete) Header(key, value string) *Delete { // RulesetId The unique identifier of the query ruleset to delete // API Name: rulesetid -func (r *Delete) _rulesetid(rulesetid string) *Delete { +func (r *DeleteRuleset) _rulesetid(rulesetid string) *DeleteRuleset { r.paramSet |= rulesetidMask r.rulesetid = rulesetid return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteRuleset) ErrorTrace(errortrace bool) *DeleteRuleset { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteRuleset) FilterPath(filterpaths ...string) *DeleteRuleset { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteRuleset) Human(human bool) *DeleteRuleset { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *DeleteRuleset) Pretty(pretty bool) *DeleteRuleset { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/queryruleset/delete/response.go b/typedapi/queryrules/deleteruleset/response.go similarity index 79% rename from typedapi/queryruleset/delete/response.go rename to typedapi/queryrules/deleteruleset/response.go index d4305c686b..f23fb8f98d 100644 --- a/typedapi/queryruleset/delete/response.go +++ b/typedapi/queryrules/deleteruleset/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -package delete +package deleteruleset -// Response holds the response body struct for the package delete +// Response holds the response body struct for the package deleteruleset // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/query_ruleset/delete/QueryRulesetDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/delete_ruleset/QueryRulesetDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/queryrules/getrule/get_rule.go b/typedapi/queryrules/getrule/get_rule.go new file mode 100644 index 0000000000..0bb6f89f27 --- /dev/null +++ b/typedapi/queryrules/getrule/get_rule.go @@ -0,0 +1,365 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Returns the details about a query rule within a query ruleset +package getrule + +import ( + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" +) + +const ( + rulesetidMask = iota + 1 + + ruleidMask +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. 
+var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type GetRule struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + rulesetid string + ruleid string + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewGetRule type alias for index. +type NewGetRule func(rulesetid, ruleid string) *GetRule + +// NewGetRuleFunc returns a new instance of GetRule with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewGetRuleFunc(tp elastictransport.Interface) NewGetRule { + return func(rulesetid, ruleid string) *GetRule { + n := New(tp) + + n._rulesetid(rulesetid) + + n._ruleid(ruleid) + + return n + } +} + +// Returns the details about a query rule within a query ruleset +// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/get-query-rule.html +func New(tp elastictransport.Interface) *GetRule { + r := &GetRule{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *GetRule) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == rulesetidMask|ruleidMask: + path.WriteString("/") + path.WriteString("_query_rules") + path.WriteString("/") + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordPathPart(ctx, "rulesetid", r.rulesetid) + } + path.WriteString(r.rulesetid) + path.WriteString("/") + path.WriteString("_rule") + path.WriteString("/") + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordPathPart(ctx, "ruleid", r.ruleid) + } + path.WriteString(r.ruleid) + + method = http.MethodGet + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
+func (r GetRule) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "query_rules.get_rule") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "query_rules.get_rule") + if reader := instrument.RecordRequestBody(ctx, "query_rules.get_rule", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "query_rules.get_rule") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the GetRule query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a getrule.Response +func (r GetRule) Do(providedCtx context.Context) (*Response, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "query_rules.get_rule") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + response := NewResponse() + + res, err := r.Perform(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 299 { + err = json.NewDecoder(res.Body).Decode(response) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + return response, nil + } + + errorResponse := types.NewElasticsearchError() + err = json.NewDecoder(res.Body).Decode(errorResponse) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if errorResponse.Status == 0 { + errorResponse.Status = res.StatusCode + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, errorResponse) + } + return nil, errorResponse +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. 
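// Editorial note, not part of the generated file: a sketch of fetching a
// single rule and reading the typed response. Identifiers are illustrative
// and tp is assumed to implement elastictransport.Interface.
func exampleGetRule(tp elastictransport.Interface) error {
	rule, err := NewGetRuleFunc(tp)("my-ruleset", "my-rule").Do(context.Background())
	if err != nil {
		// Non-success responses are decoded into *types.ElasticsearchError
		// and returned here as the error.
		return err
	}
	fmt.Printf("rule %s of type %v has %d criteria\n", rule.RuleId, rule.Type, len(rule.Criteria))
	return nil
}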
+func (r GetRule) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "query_rules.get_rule") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the GetRule query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the GetRule headers map. +func (r *GetRule) Header(key, value string) *GetRule { + r.headers.Set(key, value) + + return r +} + +// RulesetId The unique identifier of the query ruleset containing the rule to retrieve +// API Name: rulesetid +func (r *GetRule) _rulesetid(rulesetid string) *GetRule { + r.paramSet |= rulesetidMask + r.rulesetid = rulesetid + + return r +} + +// RuleId The unique identifier of the query rule within the specified ruleset to +// retrieve +// API Name: ruleid +func (r *GetRule) _ruleid(ruleid string) *GetRule { + r.paramSet |= ruleidMask + r.ruleid = ruleid + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetRule) ErrorTrace(errortrace bool) *GetRule { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetRule) FilterPath(filterpaths ...string) *GetRule { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetRule) Human(human bool) *GetRule { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetRule) Pretty(pretty bool) *GetRule { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/queryrules/getrule/response.go b/typedapi/queryrules/getrule/response.go new file mode 100644 index 0000000000..6a18f3e87e --- /dev/null +++ b/typedapi/queryrules/getrule/response.go @@ -0,0 +1,42 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package getrule + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/queryruletype" +) + +// Response holds the response body struct for the package getrule +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/get_rule/QueryRuleGetResponse.ts#L22-L24 +type Response struct { + Actions types.QueryRuleActions `json:"actions"` + Criteria []types.QueryRuleCriteria `json:"criteria"` + RuleId string `json:"rule_id"` + Type queryruletype.QueryRuleType `json:"type"` +} + +// NewResponse returns a Response +func NewResponse() *Response { + r := &Response{} + return r +} diff --git a/typedapi/queryruleset/get/get.go b/typedapi/queryrules/getruleset/get_ruleset.go similarity index 66% rename from typedapi/queryruleset/get/get.go rename to typedapi/queryrules/getruleset/get_ruleset.go index 5ec9d10bb7..d8320c7155 100644 --- a/typedapi/queryruleset/get/get.go +++ b/typedapi/queryrules/getruleset/get_ruleset.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns the details about a query ruleset. -package get +// Returns the details about a query ruleset +package getruleset import ( "context" @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -43,7 +43,7 @@ const ( // ErrBuildPath is returned in case of missing parameters within the build of the request. var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") -type Get struct { +type GetRuleset struct { transport elastictransport.Interface headers http.Header @@ -61,13 +61,13 @@ type Get struct { instrument elastictransport.Instrumentation } -// NewGet type alias for index. -type NewGet func(rulesetid string) *Get +// NewGetRuleset type alias for index. +type NewGetRuleset func(rulesetid string) *GetRuleset -// NewGetFunc returns a new instance of Get with the provided transport. +// NewGetRulesetFunc returns a new instance of GetRuleset with the provided transport. // Used in the index of the library this allows to retrieve every apis in once place. -func NewGetFunc(tp elastictransport.Interface) NewGet { - return func(rulesetid string) *Get { +func NewGetRulesetFunc(tp elastictransport.Interface) NewGetRuleset { + return func(rulesetid string) *GetRuleset { n := New(tp) n._rulesetid(rulesetid) @@ -76,11 +76,11 @@ func NewGetFunc(tp elastictransport.Interface) NewGet { } } -// Returns the details about a query ruleset. 
+// Returns the details about a query ruleset // // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-query-ruleset.html -func New(tp elastictransport.Interface) *Get { - r := &Get{ +func New(tp elastictransport.Interface) *GetRuleset { + r := &GetRuleset{ transport: tp, values: make(url.Values), headers: make(http.Header), @@ -97,7 +97,7 @@ func New(tp elastictransport.Interface) *Get { // HttpRequest returns the http.Request object built from the // given parameters. -func (r *Get) HttpRequest(ctx context.Context) (*http.Request, error) { +func (r *GetRuleset) HttpRequest(ctx context.Context) (*http.Request, error) { var path strings.Builder var method string var req *http.Request @@ -147,11 +147,11 @@ func (r *Get) HttpRequest(ctx context.Context) (*http.Request, error) { } // Perform runs the http.Request through the provided transport and returns an http.Response. -func (r Get) Perform(providedCtx context.Context) (*http.Response, error) { +func (r GetRuleset) Perform(providedCtx context.Context) (*http.Response, error) { var ctx context.Context if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { if r.spanStarted == false { - ctx := instrument.Start(providedCtx, "query_ruleset.get") + ctx := instrument.Start(providedCtx, "query_rules.get_ruleset") defer instrument.Close(ctx) } } @@ -168,17 +168,17 @@ func (r Get) Perform(providedCtx context.Context) (*http.Response, error) { } if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.BeforeRequest(req, "query_ruleset.get") - if reader := instrument.RecordRequestBody(ctx, "query_ruleset.get", r.raw); reader != nil { + instrument.BeforeRequest(req, "query_rules.get_ruleset") + if reader := instrument.RecordRequestBody(ctx, "query_rules.get_ruleset", r.raw); reader != nil { req.Body = reader } } res, err := r.transport.Perform(req) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "query_ruleset.get") + instrument.AfterRequest(req, "elasticsearch", "query_rules.get_ruleset") } if err != nil { - localErr := fmt.Errorf("an error happened during the Get query execution: %w", err) + localErr := fmt.Errorf("an error happened during the GetRuleset query execution: %w", err) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, localErr) } @@ -188,12 +188,12 @@ func (r Get) Perform(providedCtx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a get.Response -func (r Get) Do(providedCtx context.Context) (*Response, error) { +// Do runs the request through the transport, handle the response and returns a getruleset.Response +func (r GetRuleset) Do(providedCtx context.Context) (*Response, error) { var ctx context.Context r.spanStarted = true if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - ctx = instrument.Start(providedCtx, "query_ruleset.get") + ctx = instrument.Start(providedCtx, "query_rules.get_ruleset") defer instrument.Close(ctx) } if ctx == nil { @@ -244,11 +244,11 @@ func (r Get) Do(providedCtx context.Context) (*Response, error) { // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. 
-func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { +func (r GetRuleset) IsSuccess(providedCtx context.Context) (bool, error) { var ctx context.Context r.spanStarted = true if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - ctx = instrument.Start(providedCtx, "query_ruleset.get") + ctx = instrument.Start(providedCtx, "query_rules.get_ruleset") defer instrument.Close(ctx) } if ctx == nil { @@ -260,7 +260,7 @@ func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -271,7 +271,7 @@ func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { } if res.StatusCode != 404 { - err := fmt.Errorf("an error happened during the Get query execution, status code: %d", res.StatusCode) + err := fmt.Errorf("an error happened during the GetRuleset query execution, status code: %d", res.StatusCode) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, err) } @@ -281,8 +281,8 @@ func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { return false, nil } -// Header set a key, value pair in the Get headers map. -func (r *Get) Header(key, value string) *Get { +// Header set a key, value pair in the GetRuleset headers map. +func (r *GetRuleset) Header(key, value string) *GetRuleset { r.headers.Set(key, value) return r @@ -290,9 +290,53 @@ func (r *Get) Header(key, value string) *Get { // RulesetId The unique identifier of the query ruleset // API Name: rulesetid -func (r *Get) _rulesetid(rulesetid string) *Get { +func (r *GetRuleset) _rulesetid(rulesetid string) *GetRuleset { r.paramSet |= rulesetidMask r.rulesetid = rulesetid return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetRuleset) ErrorTrace(errortrace bool) *GetRuleset { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetRuleset) FilterPath(filterpaths ...string) *GetRuleset { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetRuleset) Human(human bool) *GetRuleset { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *GetRuleset) Pretty(pretty bool) *GetRuleset { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/queryruleset/get/response.go b/typedapi/queryrules/getruleset/response.go similarity index 76% rename from typedapi/queryruleset/get/response.go rename to typedapi/queryrules/getruleset/response.go index 3bd13337de..1067c8f6a6 100644 --- a/typedapi/queryruleset/get/response.go +++ b/typedapi/queryrules/getruleset/response.go @@ -16,17 +16,17 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -package get +package getruleset import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) -// Response holds the response body struct for the package get +// Response holds the response body struct for the package getruleset // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/query_ruleset/get/QueryRulesetGetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/get_ruleset/QueryRulesetGetResponse.ts#L22-L24 type Response struct { // Rules Rules associated with the query ruleset diff --git a/typedapi/queryruleset/list/list.go b/typedapi/queryrules/listrulesets/list_rulesets.go similarity index 64% rename from typedapi/queryruleset/list/list.go rename to typedapi/queryrules/listrulesets/list_rulesets.go index 1b4e6afe70..34eb9ea2a1 100644 --- a/typedapi/queryruleset/list/list.go +++ b/typedapi/queryrules/listrulesets/list_rulesets.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Lists query rulesets. -package list +// Returns summarized information about existing query rulesets. +package listrulesets import ( "context" @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -40,7 +39,7 @@ import ( // ErrBuildPath is returned in case of missing parameters within the build of the request. var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") -type List struct { +type ListRulesets struct { transport elastictransport.Interface headers http.Header @@ -56,24 +55,24 @@ type List struct { instrument elastictransport.Instrumentation } -// NewList type alias for index. -type NewList func() *List +// NewListRulesets type alias for index. +type NewListRulesets func() *ListRulesets -// NewListFunc returns a new instance of List with the provided transport. +// NewListRulesetsFunc returns a new instance of ListRulesets with the provided transport. // Used in the index of the library this allows to retrieve every apis in once place. -func NewListFunc(tp elastictransport.Interface) NewList { - return func() *List { +func NewListRulesetsFunc(tp elastictransport.Interface) NewListRulesets { + return func() *ListRulesets { n := New(tp) return n } } -// Lists query rulesets. +// Returns summarized information about existing query rulesets. 
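// A short sketch of the renamed query_rules.get_ruleset call (formerly
// query_ruleset.get). IsSuccess works as a quick existence check: it returns
// true for a 2xx response, false for a 404, and an error for anything else.
// The QueryRules namespace accessor and the ruleset ID are assumptions.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"}, // placeholder address
	})
	if err != nil {
		log.Fatal(err)
	}

	ctx := context.Background()

	// Quick control flow without decoding the response body.
	exists, err := es.QueryRules.GetRuleset("my-ruleset").IsSuccess(ctx)
	if err != nil {
		log.Fatal(err)
	}
	if !exists {
		fmt.Println("ruleset not found")
		return
	}

	// Full response: getruleset.Response carries the Rules slice.
	resp, err := es.QueryRules.GetRuleset("my-ruleset").Do(ctx)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("ruleset has %d rules\n", len(resp.Rules))
}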
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/list-query-rulesets.html -func New(tp elastictransport.Interface) *List { - r := &List{ +func New(tp elastictransport.Interface) *ListRulesets { + r := &ListRulesets{ transport: tp, values: make(url.Values), headers: make(http.Header), @@ -90,7 +89,7 @@ func New(tp elastictransport.Interface) *List { // HttpRequest returns the http.Request object built from the // given parameters. -func (r *List) HttpRequest(ctx context.Context) (*http.Request, error) { +func (r *ListRulesets) HttpRequest(ctx context.Context) (*http.Request, error) { var path strings.Builder var method string var req *http.Request @@ -134,11 +133,11 @@ func (r *List) HttpRequest(ctx context.Context) (*http.Request, error) { } // Perform runs the http.Request through the provided transport and returns an http.Response. -func (r List) Perform(providedCtx context.Context) (*http.Response, error) { +func (r ListRulesets) Perform(providedCtx context.Context) (*http.Response, error) { var ctx context.Context if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { if r.spanStarted == false { - ctx := instrument.Start(providedCtx, "query_ruleset.list") + ctx := instrument.Start(providedCtx, "query_rules.list_rulesets") defer instrument.Close(ctx) } } @@ -155,17 +154,17 @@ func (r List) Perform(providedCtx context.Context) (*http.Response, error) { } if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.BeforeRequest(req, "query_ruleset.list") - if reader := instrument.RecordRequestBody(ctx, "query_ruleset.list", r.raw); reader != nil { + instrument.BeforeRequest(req, "query_rules.list_rulesets") + if reader := instrument.RecordRequestBody(ctx, "query_rules.list_rulesets", r.raw); reader != nil { req.Body = reader } } res, err := r.transport.Perform(req) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "query_ruleset.list") + instrument.AfterRequest(req, "elasticsearch", "query_rules.list_rulesets") } if err != nil { - localErr := fmt.Errorf("an error happened during the List query execution: %w", err) + localErr := fmt.Errorf("an error happened during the ListRulesets query execution: %w", err) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, localErr) } @@ -175,12 +174,12 @@ func (r List) Perform(providedCtx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a list.Response -func (r List) Do(providedCtx context.Context) (*Response, error) { +// Do runs the request through the transport, handle the response and returns a listrulesets.Response +func (r ListRulesets) Do(providedCtx context.Context) (*Response, error) { var ctx context.Context r.spanStarted = true if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - ctx = instrument.Start(providedCtx, "query_ruleset.list") + ctx = instrument.Start(providedCtx, "query_rules.list_rulesets") defer instrument.Close(ctx) } if ctx == nil { @@ -231,11 +230,11 @@ func (r List) Do(providedCtx context.Context) (*Response, error) { // IsSuccess allows to run a query with a context and retrieve the result as a boolean. // This only exists for endpoints without a request payload and allows for quick control flow. 
-func (r List) IsSuccess(providedCtx context.Context) (bool, error) { +func (r ListRulesets) IsSuccess(providedCtx context.Context) (bool, error) { var ctx context.Context r.spanStarted = true if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - ctx = instrument.Start(providedCtx, "query_ruleset.list") + ctx = instrument.Start(providedCtx, "query_rules.list_rulesets") defer instrument.Close(ctx) } if ctx == nil { @@ -247,7 +246,7 @@ func (r List) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -258,7 +257,7 @@ func (r List) IsSuccess(providedCtx context.Context) (bool, error) { } if res.StatusCode != 404 { - err := fmt.Errorf("an error happened during the List query execution, status code: %d", res.StatusCode) + err := fmt.Errorf("an error happened during the ListRulesets query execution, status code: %d", res.StatusCode) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, err) } @@ -268,8 +267,8 @@ func (r List) IsSuccess(providedCtx context.Context) (bool, error) { return false, nil } -// Header set a key, value pair in the List headers map. -func (r *List) Header(key, value string) *List { +// Header set a key, value pair in the ListRulesets headers map. +func (r *ListRulesets) Header(key, value string) *ListRulesets { r.headers.Set(key, value) return r @@ -277,7 +276,7 @@ func (r *List) Header(key, value string) *List { // From Starting offset (default: 0) // API name: from -func (r *List) From(from int) *List { +func (r *ListRulesets) From(from int) *ListRulesets { r.values.Set("from", strconv.Itoa(from)) return r @@ -285,8 +284,52 @@ func (r *List) From(from int) *List { // Size specifies a max number of results to get // API name: size -func (r *List) Size(size int) *List { +func (r *ListRulesets) Size(size int) *ListRulesets { r.values.Set("size", strconv.Itoa(size)) return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ListRulesets) ErrorTrace(errortrace bool) *ListRulesets { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ListRulesets) FilterPath(filterpaths ...string) *ListRulesets { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ListRulesets) Human(human bool) *ListRulesets { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *ListRulesets) Pretty(pretty bool) *ListRulesets { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/queryruleset/list/response.go b/typedapi/queryrules/listrulesets/response.go similarity index 75% rename from typedapi/queryruleset/list/response.go rename to typedapi/queryrules/listrulesets/response.go index 90cbc8260a..90caf0306d 100644 --- a/typedapi/queryruleset/list/response.go +++ b/typedapi/queryrules/listrulesets/response.go @@ -16,17 +16,17 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -package list +package listrulesets import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) -// Response holds the response body struct for the package list +// Response holds the response body struct for the package listrulesets // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/query_ruleset/list/QueryRulesetListResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/list_rulesets/QueryRulesetListResponse.ts#L23-L28 type Response struct { Count int64 `json:"count"` Results []types.QueryRulesetListItem `json:"results"` diff --git a/typedapi/queryrules/putrule/put_rule.go b/typedapi/queryrules/putrule/put_rule.go new file mode 100644 index 0000000000..cae47318ed --- /dev/null +++ b/typedapi/queryrules/putrule/put_rule.go @@ -0,0 +1,405 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Creates or updates a query rule within a query ruleset. +package putrule + +import ( + gobytes "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strconv" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" + "github.com/elastic/go-elasticsearch/v8/typedapi/types" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/queryruletype" +) + +const ( + rulesetidMask = iota + 1 + + ruleidMask +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. 
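// A sketch of paginating the renamed query_rules.list_rulesets endpoint
// (formerly query_ruleset.list) with the From and Size query parameters shown
// above; the QueryRules namespace accessor and the page size are assumptions.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"}, // placeholder address
	})
	if err != nil {
		log.Fatal(err)
	}

	// listrulesets.Response exposes Count and Results
	// ([]types.QueryRulesetListItem).
	resp, err := es.QueryRules.ListRulesets().
		From(0).
		Size(20).
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%d rulesets total, %d in this page\n", resp.Count, len(resp.Results))
}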
+var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type PutRule struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + req *Request + deferred []func(request *Request) error + buf *gobytes.Buffer + + paramSet int + + rulesetid string + ruleid string + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewPutRule type alias for index. +type NewPutRule func(rulesetid, ruleid string) *PutRule + +// NewPutRuleFunc returns a new instance of PutRule with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewPutRuleFunc(tp elastictransport.Interface) NewPutRule { + return func(rulesetid, ruleid string) *PutRule { + n := New(tp) + + n._rulesetid(rulesetid) + + n._ruleid(ruleid) + + return n + } +} + +// Creates or updates a query rule within a query ruleset. +// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/put-query-rule.html +func New(tp elastictransport.Interface) *PutRule { + r := &PutRule{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + + buf: gobytes.NewBuffer(nil), + + req: NewRequest(), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// Raw takes a json payload as input which is then passed to the http.Request +// If specified Raw takes precedence on Request method. +func (r *PutRule) Raw(raw io.Reader) *PutRule { + r.raw = raw + + return r +} + +// Request allows to set the request property with the appropriate payload. +func (r *PutRule) Request(req *Request) *PutRule { + r.req = req + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. 
+func (r *PutRule) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + if len(r.deferred) > 0 { + for _, f := range r.deferred { + deferredErr := f(r.req) + if deferredErr != nil { + return nil, deferredErr + } + } + } + + if r.raw == nil && r.req != nil { + + data, err := json.Marshal(r.req) + + if err != nil { + return nil, fmt.Errorf("could not serialise request for PutRule: %w", err) + } + + r.buf.Write(data) + + } + + if r.buf.Len() > 0 { + r.raw = r.buf + } + + r.path.Scheme = "http" + + switch { + case r.paramSet == rulesetidMask|ruleidMask: + path.WriteString("/") + path.WriteString("_query_rules") + path.WriteString("/") + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordPathPart(ctx, "rulesetid", r.rulesetid) + } + path.WriteString(r.rulesetid) + path.WriteString("/") + path.WriteString("_rule") + path.WriteString("/") + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordPathPart(ctx, "ruleid", r.ruleid) + } + path.WriteString(r.ruleid) + + method = http.MethodPut + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Content-Type") == "" { + if r.raw != nil { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
+func (r PutRule) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "query_rules.put_rule") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "query_rules.put_rule") + if reader := instrument.RecordRequestBody(ctx, "query_rules.put_rule", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "query_rules.put_rule") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the PutRule query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a putrule.Response +func (r PutRule) Do(providedCtx context.Context) (*Response, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "query_rules.put_rule") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + response := NewResponse() + + res, err := r.Perform(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + defer res.Body.Close() + + if res.StatusCode < 299 { + err = json.NewDecoder(res.Body).Decode(response) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + return response, nil + } + + errorResponse := types.NewElasticsearchError() + err = json.NewDecoder(res.Body).Decode(errorResponse) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if errorResponse.Status == 0 { + errorResponse.Status = res.StatusCode + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, errorResponse) + } + return nil, errorResponse +} + +// Header set a key, value pair in the PutRule headers map. +func (r *PutRule) Header(key, value string) *PutRule { + r.headers.Set(key, value) + + return r +} + +// RulesetId The unique identifier of the query ruleset containing the rule to be created +// or updated +// API Name: rulesetid +func (r *PutRule) _rulesetid(rulesetid string) *PutRule { + r.paramSet |= rulesetidMask + r.rulesetid = rulesetid + + return r +} + +// RuleId The unique identifier of the query rule within the specified ruleset to be +// created or updated +// API Name: ruleid +func (r *PutRule) _ruleid(ruleid string) *PutRule { + r.paramSet |= ruleidMask + r.ruleid = ruleid + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *PutRule) ErrorTrace(errortrace bool) *PutRule { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutRule) FilterPath(filterpaths ...string) *PutRule { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutRule) Human(human bool) *PutRule { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutRule) Pretty(pretty bool) *PutRule { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + +// API name: actions +func (r *PutRule) Actions(actions *types.QueryRuleActions) *PutRule { + + r.req.Actions = *actions + + return r +} + +// API name: criteria +func (r *PutRule) Criteria(criteria ...types.QueryRuleCriteria) *PutRule { + r.req.Criteria = criteria + + return r +} + +// API name: type +func (r *PutRule) Type(type_ queryruletype.QueryRuleType) *PutRule { + r.req.Type = type_ + + return r +} diff --git a/typedapi/queryrules/putrule/request.go b/typedapi/queryrules/putrule/request.go new file mode 100644 index 0000000000..70a0a868a2 --- /dev/null +++ b/typedapi/queryrules/putrule/request.go @@ -0,0 +1,57 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. 
+// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package putrule + +import ( + "encoding/json" + "fmt" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/queryruletype" +) + +// Request holds the request body struct for the package putrule +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/put_rule/QueryRulePutRequest.ts#L27-L54 +type Request struct { + Actions types.QueryRuleActions `json:"actions"` + Criteria []types.QueryRuleCriteria `json:"criteria"` + Type queryruletype.QueryRuleType `json:"type"` +} + +// NewRequest returns a Request +func NewRequest() *Request { + r := &Request{} + + return r +} + +// FromJSON allows to load an arbitrary json into the request structure +func (r *Request) FromJSON(data string) (*Request, error) { + var req Request + err := json.Unmarshal([]byte(data), &req) + + if err != nil { + return nil, fmt.Errorf("could not deserialise json into Putrule request: %w", err) + } + + return &req, nil +} diff --git a/typedapi/queryruleset/put/response.go b/typedapi/queryrules/putrule/response.go similarity index 75% rename from typedapi/queryruleset/put/response.go rename to typedapi/queryrules/putrule/response.go index 00a6c1d060..6688ecf991 100644 --- a/typedapi/queryruleset/put/response.go +++ b/typedapi/queryrules/putrule/response.go @@ -16,17 +16,17 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -package put +package putrule import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/result" ) -// Response holds the response body struct for the package put +// Response holds the response body struct for the package putrule // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/query_ruleset/put/QueryRulesetPutResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/put_rule/QueryRulePutResponse.ts#L22-L26 type Response struct { Result result.Result `json:"result"` } diff --git a/typedapi/queryruleset/put/put.go b/typedapi/queryrules/putruleset/put_ruleset.go similarity index 67% rename from typedapi/queryruleset/put/put.go rename to typedapi/queryrules/putruleset/put_ruleset.go index 2199a7944e..5e4ceed01e 100644 --- a/typedapi/queryruleset/put/put.go +++ b/typedapi/queryrules/putruleset/put_ruleset.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates or updates a query ruleset. -package put +package putruleset import ( gobytes "bytes" @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -43,7 +44,7 @@ const ( // ErrBuildPath is returned in case of missing parameters within the build of the request. 
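// A sketch of creating a pinned query rule with the new query_rules.put_rule
// endpoint, loading the body through the generated FromJSON helper shown above.
// The JSON shape follows the documented query rules API, but the IDs, metadata
// key and pinned document IDs are illustrative, as is the QueryRules namespace
// accessor.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/queryrules/putrule"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"}, // placeholder address
	})
	if err != nil {
		log.Fatal(err)
	}

	req, err := putrule.NewRequest().FromJSON(`{
	  "type": "pinned",
	  "criteria": [
	    { "type": "exact", "metadata": "user_query", "values": ["puggles"] }
	  ],
	  "actions": { "ids": ["doc-1", "doc-2"] }
	}`)
	if err != nil {
		log.Fatal(err)
	}

	resp, err := es.QueryRules.PutRule("my-ruleset", "my-rule").
		Request(req).
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("result:", resp.Result) // "created" or "updated"
}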
var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") -type Put struct { +type PutRuleset struct { transport elastictransport.Interface headers http.Header @@ -65,13 +66,13 @@ type Put struct { instrument elastictransport.Instrumentation } -// NewPut type alias for index. -type NewPut func(rulesetid string) *Put +// NewPutRuleset type alias for index. +type NewPutRuleset func(rulesetid string) *PutRuleset -// NewPutFunc returns a new instance of Put with the provided transport. +// NewPutRulesetFunc returns a new instance of PutRuleset with the provided transport. // Used in the index of the library this allows to retrieve every apis in once place. -func NewPutFunc(tp elastictransport.Interface) NewPut { - return func(rulesetid string) *Put { +func NewPutRulesetFunc(tp elastictransport.Interface) NewPutRuleset { + return func(rulesetid string) *PutRuleset { n := New(tp) n._rulesetid(rulesetid) @@ -83,8 +84,8 @@ func NewPutFunc(tp elastictransport.Interface) NewPut { // Creates or updates a query ruleset. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-query-ruleset.html -func New(tp elastictransport.Interface) *Put { - r := &Put{ +func New(tp elastictransport.Interface) *PutRuleset { + r := &PutRuleset{ transport: tp, values: make(url.Values), headers: make(http.Header), @@ -105,14 +106,14 @@ func New(tp elastictransport.Interface) *Put { // Raw takes a json payload as input which is then passed to the http.Request // If specified Raw takes precedence on Request method. -func (r *Put) Raw(raw io.Reader) *Put { +func (r *PutRuleset) Raw(raw io.Reader) *PutRuleset { r.raw = raw return r } // Request allows to set the request property with the appropriate payload. -func (r *Put) Request(req *Request) *Put { +func (r *PutRuleset) Request(req *Request) *PutRuleset { r.req = req return r @@ -120,7 +121,7 @@ func (r *Put) Request(req *Request) *Put { // HttpRequest returns the http.Request object built from the // given parameters. -func (r *Put) HttpRequest(ctx context.Context) (*http.Request, error) { +func (r *PutRuleset) HttpRequest(ctx context.Context) (*http.Request, error) { var path strings.Builder var method string var req *http.Request @@ -141,7 +142,7 @@ func (r *Put) HttpRequest(ctx context.Context) (*http.Request, error) { data, err := json.Marshal(r.req) if err != nil { - return nil, fmt.Errorf("could not serialise request for Put: %w", err) + return nil, fmt.Errorf("could not serialise request for PutRuleset: %w", err) } r.buf.Write(data) @@ -201,11 +202,11 @@ func (r *Put) HttpRequest(ctx context.Context) (*http.Request, error) { } // Perform runs the http.Request through the provided transport and returns an http.Response. 
-func (r Put) Perform(providedCtx context.Context) (*http.Response, error) { +func (r PutRuleset) Perform(providedCtx context.Context) (*http.Response, error) { var ctx context.Context if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { if r.spanStarted == false { - ctx := instrument.Start(providedCtx, "query_ruleset.put") + ctx := instrument.Start(providedCtx, "query_rules.put_ruleset") defer instrument.Close(ctx) } } @@ -222,17 +223,17 @@ func (r Put) Perform(providedCtx context.Context) (*http.Response, error) { } if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.BeforeRequest(req, "query_ruleset.put") - if reader := instrument.RecordRequestBody(ctx, "query_ruleset.put", r.raw); reader != nil { + instrument.BeforeRequest(req, "query_rules.put_ruleset") + if reader := instrument.RecordRequestBody(ctx, "query_rules.put_ruleset", r.raw); reader != nil { req.Body = reader } } res, err := r.transport.Perform(req) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - instrument.AfterRequest(req, "elasticsearch", "query_ruleset.put") + instrument.AfterRequest(req, "elasticsearch", "query_rules.put_ruleset") } if err != nil { - localErr := fmt.Errorf("an error happened during the Put query execution: %w", err) + localErr := fmt.Errorf("an error happened during the PutRuleset query execution: %w", err) if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { instrument.RecordError(ctx, localErr) } @@ -242,12 +243,12 @@ func (r Put) Perform(providedCtx context.Context) (*http.Response, error) { return res, nil } -// Do runs the request through the transport, handle the response and returns a put.Response -func (r Put) Do(providedCtx context.Context) (*Response, error) { +// Do runs the request through the transport, handle the response and returns a putruleset.Response +func (r PutRuleset) Do(providedCtx context.Context) (*Response, error) { var ctx context.Context r.spanStarted = true if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { - ctx = instrument.Start(providedCtx, "query_ruleset.put") + ctx = instrument.Start(providedCtx, "query_rules.put_ruleset") defer instrument.Close(ctx) } if ctx == nil { @@ -296,8 +297,8 @@ func (r Put) Do(providedCtx context.Context) (*Response, error) { return nil, errorResponse } -// Header set a key, value pair in the Put headers map. -func (r *Put) Header(key, value string) *Put { +// Header set a key, value pair in the PutRuleset headers map. +func (r *PutRuleset) Header(key, value string) *PutRuleset { r.headers.Set(key, value) return r @@ -305,15 +306,59 @@ func (r *Put) Header(key, value string) *Put { // RulesetId The unique identifier of the query ruleset to be created or updated // API Name: rulesetid -func (r *Put) _rulesetid(rulesetid string) *Put { +func (r *PutRuleset) _rulesetid(rulesetid string) *PutRuleset { r.paramSet |= rulesetidMask r.rulesetid = rulesetid return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutRuleset) ErrorTrace(errortrace bool) *PutRuleset { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *PutRuleset) FilterPath(filterpaths ...string) *PutRuleset { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutRuleset) Human(human bool) *PutRuleset { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutRuleset) Pretty(pretty bool) *PutRuleset { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: rules -func (r *Put) Rules(rules ...types.QueryRule) *Put { +func (r *PutRuleset) Rules(rules ...types.QueryRule) *PutRuleset { r.req.Rules = rules return r diff --git a/typedapi/queryruleset/put/request.go b/typedapi/queryrules/putruleset/request.go similarity index 74% rename from typedapi/queryruleset/put/request.go rename to typedapi/queryrules/putruleset/request.go index 8e4052c8e6..97d42c779f 100644 --- a/typedapi/queryruleset/put/request.go +++ b/typedapi/queryrules/putruleset/request.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -package put +package putruleset import ( "encoding/json" @@ -27,9 +27,9 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types" ) -// Request holds the request body struct for the package put +// Request holds the request body struct for the package putruleset // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/query_ruleset/put/QueryRulesetPutRequest.ts#L23-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/put_ruleset/QueryRulesetPutRequest.ts#L23-L43 type Request struct { Rules []types.QueryRule `json:"rules"` } @@ -37,6 +37,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -46,7 +47,7 @@ func (r *Request) FromJSON(data string) (*Request, error) { err := json.Unmarshal([]byte(data), &req) if err != nil { - return nil, fmt.Errorf("could not deserialise json into Put request: %w", err) + return nil, fmt.Errorf("could not deserialise json into Putruleset request: %w", err) } return &req, nil diff --git a/typedapi/queryrules/putruleset/response.go b/typedapi/queryrules/putruleset/response.go new file mode 100644 index 0000000000..c93b23be5d --- /dev/null +++ b/typedapi/queryrules/putruleset/response.go @@ -0,0 +1,38 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. 
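// A sketch of the renamed query_rules.put_ruleset call using the Raw reader,
// which takes a prepared JSON payload instead of the typed Request. The ruleset
// body mirrors the documented query rules format; the IDs and values are
// placeholders, and the QueryRules namespace accessor is an assumption.
package main

import (
	"context"
	"fmt"
	"log"
	"strings"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"}, // placeholder address
	})
	if err != nil {
		log.Fatal(err)
	}

	body := strings.NewReader(`{
	  "rules": [
	    {
	      "rule_id": "my-rule",
	      "type": "pinned",
	      "criteria": [
	        { "type": "exact", "metadata": "user_query", "values": ["puggles"] }
	      ],
	      "actions": { "ids": ["doc-1"] }
	    }
	  ]
	}`)

	resp, err := es.QueryRules.PutRuleset("my-ruleset").
		Raw(body).
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("result:", resp.Result)
}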
licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package putruleset + +import ( + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/result" +) + +// Response holds the response body struct for the package putruleset +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/put_ruleset/QueryRulesetPutResponse.ts#L22-L26 +type Response struct { + Result result.Result `json:"result"` +} + +// NewResponse returns a Response +func NewResponse() *Response { + r := &Response{} + return r +} diff --git a/typedapi/rollup/deletejob/delete_job.go b/typedapi/rollup/deletejob/delete_job.go index e9201a44f0..d3efe9e287 100644 --- a/typedapi/rollup/deletejob/delete_job.go +++ b/typedapi/rollup/deletejob/delete_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes an existing rollup job. package deletejob @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -262,7 +262,7 @@ func (r DeleteJob) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +298,47 @@ func (r *DeleteJob) _id(id string) *DeleteJob { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteJob) ErrorTrace(errortrace bool) *DeleteJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteJob) FilterPath(filterpaths ...string) *DeleteJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *DeleteJob) Human(human bool) *DeleteJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteJob) Pretty(pretty bool) *DeleteJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/rollup/deletejob/response.go b/typedapi/rollup/deletejob/response.go index 4f3e04ca41..8e063923f9 100644 --- a/typedapi/rollup/deletejob/response.go +++ b/typedapi/rollup/deletejob/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletejob @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deletejob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/delete_job/DeleteRollupJobResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/delete_job/DeleteRollupJobResponse.ts#L22-L27 type Response struct { Acknowledged bool `json:"acknowledged"` TaskFailures []types.TaskFailure `json:"task_failures,omitempty"` diff --git a/typedapi/rollup/getjobs/get_jobs.go b/typedapi/rollup/getjobs/get_jobs.go index 5eff4e90dd..ce6e82038f 100644 --- a/typedapi/rollup/getjobs/get_jobs.go +++ b/typedapi/rollup/getjobs/get_jobs.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves the configuration, stats, and status of rollup jobs. package getjobs @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -267,7 +267,7 @@ func (r GetJobs) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -304,3 +304,47 @@ func (r *GetJobs) Id(id string) *GetJobs { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetJobs) ErrorTrace(errortrace bool) *GetJobs { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetJobs) FilterPath(filterpaths ...string) *GetJobs { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. 
This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetJobs) Human(human bool) *GetJobs { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetJobs) Pretty(pretty bool) *GetJobs { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/rollup/getjobs/response.go b/typedapi/rollup/getjobs/response.go index 29bd661538..17246164ff 100644 --- a/typedapi/rollup/getjobs/response.go +++ b/typedapi/rollup/getjobs/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getjobs @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getjobs // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_jobs/GetRollupJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_jobs/GetRollupJobResponse.ts#L22-L24 type Response struct { Jobs []types.RollupJob `json:"jobs"` } diff --git a/typedapi/rollup/getrollupcaps/get_rollup_caps.go b/typedapi/rollup/getrollupcaps/get_rollup_caps.go index 1d94deb8b1..d182dc780b 100644 --- a/typedapi/rollup/getrollupcaps/get_rollup_caps.go +++ b/typedapi/rollup/getrollupcaps/get_rollup_caps.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns the capabilities of any rollup jobs that have been configured for a // specific index or index pattern. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -269,7 +269,7 @@ func (r GetRollupCaps) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -306,3 +306,47 @@ func (r *GetRollupCaps) Id(id string) *GetRollupCaps { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetRollupCaps) ErrorTrace(errortrace bool) *GetRollupCaps { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetRollupCaps) FilterPath(filterpaths ...string) *GetRollupCaps { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
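// The rollup endpoints in this patch gain the common error_trace, filter_path,
// human and pretty query parameters. A brief sketch with rollup.get_jobs,
// trimming the response with FilterPath; the Rollup namespace accessor, the job
// ID and the filter paths are assumptions.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"}, // placeholder address
	})
	if err != nil {
		log.Fatal(err)
	}

	// FilterPath sets ?filter_path=... and Human sets ?human=true on the
	// request URL; the decoded getjobs.Response still exposes the Jobs slice.
	resp, err := es.Rollup.GetJobs().
		Id("my-rollup-job").
		FilterPath("jobs.config", "jobs.status").
		Human(true).
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("fetched %d rollup job(s)\n", len(resp.Jobs))
}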
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetRollupCaps) Human(human bool) *GetRollupCaps { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetRollupCaps) Pretty(pretty bool) *GetRollupCaps { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/rollup/getrollupcaps/response.go b/typedapi/rollup/getrollupcaps/response.go index 17fe6f25bc..a61d59e697 100644 --- a/typedapi/rollup/getrollupcaps/response.go +++ b/typedapi/rollup/getrollupcaps/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getrollupcaps @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrollupcaps // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_rollup_caps/GetRollupCapabilitiesResponse.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_rollup_caps/GetRollupCapabilitiesResponse.ts#L24-L27 type Response map[string]types.RollupCapabilities diff --git a/typedapi/rollup/getrollupindexcaps/get_rollup_index_caps.go b/typedapi/rollup/getrollupindexcaps/get_rollup_index_caps.go index 777e6aad42..2d2aa8690e 100644 --- a/typedapi/rollup/getrollupindexcaps/get_rollup_index_caps.go +++ b/typedapi/rollup/getrollupindexcaps/get_rollup_index_caps.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns the rollup capabilities of all jobs inside of a rollup index (e.g. -// the index where rollup data is stored). +// Returns the rollup capabilities of all jobs inside of a rollup index (for +// example, the index where rollup data is stored). package getrollupindexcaps import ( @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,8 +77,8 @@ func NewGetRollupIndexCapsFunc(tp elastictransport.Interface) NewGetRollupIndexC } } -// Returns the rollup capabilities of all jobs inside of a rollup index (e.g. -// the index where rollup data is stored). +// Returns the rollup capabilities of all jobs inside of a rollup index (for +// example, the index where rollup data is stored). 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-get-rollup-index-caps.html func New(tp elastictransport.Interface) *GetRollupIndexCaps { @@ -264,7 +264,7 @@ func (r GetRollupIndexCaps) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -301,3 +301,47 @@ func (r *GetRollupIndexCaps) _index(index string) *GetRollupIndexCaps { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetRollupIndexCaps) ErrorTrace(errortrace bool) *GetRollupIndexCaps { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetRollupIndexCaps) FilterPath(filterpaths ...string) *GetRollupIndexCaps { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetRollupIndexCaps) Human(human bool) *GetRollupIndexCaps { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetRollupIndexCaps) Pretty(pretty bool) *GetRollupIndexCaps { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/rollup/getrollupindexcaps/response.go b/typedapi/rollup/getrollupindexcaps/response.go index 46435a6639..3181e9aec2 100644 --- a/typedapi/rollup/getrollupindexcaps/response.go +++ b/typedapi/rollup/getrollupindexcaps/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getrollupindexcaps @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrollupindexcaps // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_rollup_index_caps/GetRollupIndexCapabilitiesResponse.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_rollup_index_caps/GetRollupIndexCapabilitiesResponse.ts#L24-L27 type Response map[string]types.IndexCapabilities diff --git a/typedapi/rollup/putjob/put_job.go b/typedapi/rollup/putjob/put_job.go index d492ee6fb7..094a3966e7 100644 --- a/typedapi/rollup/putjob/put_job.go +++ b/typedapi/rollup/putjob/put_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates a rollup job. package putjob @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -322,6 +323,50 @@ func (r *PutJob) _id(id string) *PutJob { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutJob) ErrorTrace(errortrace bool) *PutJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutJob) FilterPath(filterpaths ...string) *PutJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutJob) Human(human bool) *PutJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutJob) Pretty(pretty bool) *PutJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Cron A cron string which defines the intervals when the rollup job should be // executed. When the interval // triggers, the indexer attempts to rollup the data in the index pattern. The diff --git a/typedapi/rollup/putjob/request.go b/typedapi/rollup/putjob/request.go index 46b4c40394..f443d6e14b 100644 --- a/typedapi/rollup/putjob/request.go +++ b/typedapi/rollup/putjob/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putjob @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/put_job/CreateRollupJobRequest.ts#L27-L89 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/put_job/CreateRollupJobRequest.ts#L27-L89 type Request struct { // Cron A cron string which defines the intervals when the rollup job should be @@ -87,6 +87,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -157,7 +158,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "page_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/rollup/putjob/response.go b/typedapi/rollup/putjob/response.go index 761ca2f291..265a26b4db 100644 --- a/typedapi/rollup/putjob/response.go +++ b/typedapi/rollup/putjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putjob // Response holds the response body struct for the package putjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/put_job/CreateRollupJobResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/put_job/CreateRollupJobResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/rollup/rollupsearch/request.go b/typedapi/rollup/rollupsearch/request.go index abb6bb4d2c..bbad07f188 100644 --- a/typedapi/rollup/rollupsearch/request.go +++ b/typedapi/rollup/rollupsearch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package rollupsearch @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package rollupsearch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/rollup_search/RollupSearchRequest.ts#L27-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/rollup_search/RollupSearchRequest.ts#L27-L57 type Request struct { // Aggregations Specifies aggregations. 
@@ -49,6 +49,7 @@ func NewRequest() *Request { r := &Request{ Aggregations: make(map[string]types.Aggregations, 0), } + return r } @@ -93,7 +94,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/rollup/rollupsearch/response.go b/typedapi/rollup/rollupsearch/response.go index c6b6c2a467..52352f1642 100644 --- a/typedapi/rollup/rollupsearch/response.go +++ b/typedapi/rollup/rollupsearch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package rollupsearch @@ -34,7 +34,7 @@ import ( // Response holds the response body struct for the package rollupsearch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/rollup_search/RollupSearchResponse.ts#L27-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/rollup_search/RollupSearchResponse.ts#L27-L36 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` Hits types.HitsMetadata `json:"hits"` @@ -522,7 +522,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := types.NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -572,7 +572,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -582,7 +582,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -602,7 +602,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "terminated_early": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -616,7 +616,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "timed_out": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -630,7 +630,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/rollup/rollupsearch/rollup_search.go b/typedapi/rollup/rollupsearch/rollup_search.go index 9109363fb4..e62c6f87ca 100644 --- a/typedapi/rollup/rollupsearch/rollup_search.go +++ b/typedapi/rollup/rollupsearch/rollup_search.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Enables searching rolled-up data using the standard query DSL. +// Enables searching rolled-up data using the standard Query DSL. 
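The rollup endpoints above (get_jobs, get_rollup_caps, get_rollup_index_caps, put_job and rollup_search) all gain the same four query-string helpers: ErrorTrace, FilterPath, Human and Pretty. A minimal usage sketch, assuming a typed client from elasticsearch.NewTypedClient and a locally reachable cluster; the address and filter path below are illustrative, not part of the generated code:

    package main

    import (
        "context"
        "log"

        "github.com/elastic/go-elasticsearch/v8"
    )

    func main() {
        es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
            Addresses: []string{"http://localhost:9200"}, // assumed local cluster
        })
        if err != nil {
            log.Fatalf("client: %s", err)
        }

        // The new helpers only set query-string parameters, so they chain like
        // any other builder method on the generated request types.
        caps, err := es.Rollup.GetRollupCaps().
            Human(true).                 // human-readable values, e.g. "1h"
            Pretty(true).                // pretty-printed JSON, for debugging
            ErrorTrace(true).            // full stack traces if the request fails
            FilterPath("*.rollup_jobs"). // trim the response to the job list
            Do(context.Background())
        if err != nil {
            log.Fatalf("get rollup caps: %s", err)
        }
        log.Printf("rollup capabilities: %v", caps)
    }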
package rollupsearch import ( @@ -81,7 +81,7 @@ func NewRollupSearchFunc(tp elastictransport.Interface) NewRollupSearch { } } -// Enables searching rolled-up data using the standard query DSL. +// Enables searching rolled-up data using the standard Query DSL. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-search.html func New(tp elastictransport.Interface) *RollupSearch { @@ -333,6 +333,50 @@ func (r *RollupSearch) TypedKeys(typedkeys bool) *RollupSearch { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *RollupSearch) ErrorTrace(errortrace bool) *RollupSearch { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *RollupSearch) FilterPath(filterpaths ...string) *RollupSearch { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *RollupSearch) Human(human bool) *RollupSearch { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *RollupSearch) Pretty(pretty bool) *RollupSearch { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Aggregations Specifies aggregations. // API name: aggregations func (r *RollupSearch) Aggregations(aggregations map[string]types.Aggregations) *RollupSearch { diff --git a/typedapi/rollup/startjob/response.go b/typedapi/rollup/startjob/response.go index 161e04d668..dafac08f43 100644 --- a/typedapi/rollup/startjob/response.go +++ b/typedapi/rollup/startjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package startjob // Response holds the response body struct for the package startjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/start_job/StartRollupJobResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/start_job/StartRollupJobResponse.ts#L20-L22 type Response struct { Started bool `json:"started"` } diff --git a/typedapi/rollup/startjob/start_job.go b/typedapi/rollup/startjob/start_job.go index 57dedcbe48..dc97fb78f1 100644 --- a/typedapi/rollup/startjob/start_job.go +++ b/typedapi/rollup/startjob/start_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Starts an existing, stopped rollup job. package startjob @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r StartJob) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +300,47 @@ func (r *StartJob) _id(id string) *StartJob { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *StartJob) ErrorTrace(errortrace bool) *StartJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *StartJob) FilterPath(filterpaths ...string) *StartJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *StartJob) Human(human bool) *StartJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *StartJob) Pretty(pretty bool) *StartJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/rollup/stopjob/response.go b/typedapi/rollup/stopjob/response.go index c676d3c069..18c3b9e62f 100644 --- a/typedapi/rollup/stopjob/response.go +++ b/typedapi/rollup/stopjob/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stopjob // Response holds the response body struct for the package stopjob // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/stop_job/StopRollupJobResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/stop_job/StopRollupJobResponse.ts#L20-L22 type Response struct { Stopped bool `json:"stopped"` } diff --git a/typedapi/rollup/stopjob/stop_job.go b/typedapi/rollup/stopjob/stop_job.go index 529f35315c..78f24852d0 100644 --- a/typedapi/rollup/stopjob/stop_job.go +++ b/typedapi/rollup/stopjob/stop_job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
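Another change that repeats through every file in this diff is the move away from the deprecated io/ioutil package: IsSuccess now drains response bodies with io.Discard, the standard replacement for ioutil.Discard since Go 1.16. The pattern is equivalent to this small standalone sketch (the URL is a placeholder):

    package main

    import (
        "io"
        "log"
        "net/http"
    )

    func main() {
        res, err := http.Get("http://localhost:9200") // placeholder endpoint
        if err != nil {
            log.Fatal(err)
        }
        // Drain the body so the underlying HTTP connection can be reused,
        // then close it; io.Discard replaces the deprecated ioutil.Discard.
        if _, err := io.Copy(io.Discard, res.Body); err != nil {
            log.Fatal(err)
        }
        if err := res.Body.Close(); err != nil {
            log.Fatal(err)
        }
    }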
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Stops an existing, started rollup job. package stopjob @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -265,7 +264,7 @@ func (r StopJob) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -322,3 +321,47 @@ func (r *StopJob) WaitForCompletion(waitforcompletion bool) *StopJob { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *StopJob) ErrorTrace(errortrace bool) *StopJob { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *StopJob) FilterPath(filterpaths ...string) *StopJob { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *StopJob) Human(human bool) *StopJob { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *StopJob) Pretty(pretty bool) *StopJob { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/searchablesnapshots/cachestats/cache_stats.go b/typedapi/searchablesnapshots/cachestats/cache_stats.go index 08efbc87cb..dd1dbbc3ef 100644 --- a/typedapi/searchablesnapshots/cachestats/cache_stats.go +++ b/typedapi/searchablesnapshots/cachestats/cache_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieve node-level cache statistics about searchable snapshots. package cachestats @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -271,7 +271,7 @@ func (r CacheStats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -316,3 +316,47 @@ func (r *CacheStats) MasterTimeout(duration string) *CacheStats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *CacheStats) ErrorTrace(errortrace bool) *CacheStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *CacheStats) FilterPath(filterpaths ...string) *CacheStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *CacheStats) Human(human bool) *CacheStats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *CacheStats) Pretty(pretty bool) *CacheStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/searchablesnapshots/cachestats/response.go b/typedapi/searchablesnapshots/cachestats/response.go index 1ca35fe59a..45ca4f2514 100644 --- a/typedapi/searchablesnapshots/cachestats/response.go +++ b/typedapi/searchablesnapshots/cachestats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package cachestats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package cachestats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/searchable_snapshots/cache_stats/Response.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/searchable_snapshots/cache_stats/Response.ts#L24-L28 type Response struct { Nodes map[string]types.Node `json:"nodes"` } diff --git a/typedapi/searchablesnapshots/clearcache/clear_cache.go b/typedapi/searchablesnapshots/clearcache/clear_cache.go index 92cc6b9d36..0e5738c023 100644 --- a/typedapi/searchablesnapshots/clearcache/clear_cache.go +++ b/typedapi/searchablesnapshots/clearcache/clear_cache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Clear the cache of searchable snapshots. 
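The searchable-snapshots cache stats endpoint picks up the same four parameters next to its existing master_timeout option. A sketch reusing the es client and imports from the first example above; names follow the typedapi/searchablesnapshots package layout shown in this diff:

    func printCacheStats(ctx context.Context, es *elasticsearch.TypedClient) error {
        stats, err := es.SearchableSnapshots.CacheStats().
            MasterTimeout("30s"). // existing option, a duration string
            Human(true).          // new common parameter from this change
            Do(ctx)
        if err != nil {
            return err
        }
        log.Printf("nodes reporting cache stats: %d", len(stats.Nodes))
        return nil
    }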
package clearcache @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -273,7 +272,7 @@ func (r ClearCache) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -354,3 +353,25 @@ func (r *ClearCache) Human(human bool) *ClearCache { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ClearCache) ErrorTrace(errortrace bool) *ClearCache { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ClearCache) FilterPath(filterpaths ...string) *ClearCache { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} diff --git a/typedapi/searchablesnapshots/clearcache/response.go b/typedapi/searchablesnapshots/clearcache/response.go index b9dd97ed37..ec2c35ba4c 100644 --- a/typedapi/searchablesnapshots/clearcache/response.go +++ b/typedapi/searchablesnapshots/clearcache/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clearcache @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcache // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/searchable_snapshots/clear_cache/SearchableSnapshotsClearCacheResponse.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/searchable_snapshots/clear_cache/SearchableSnapshotsClearCacheResponse.ts#L22-L25 type Response = json.RawMessage diff --git a/typedapi/searchablesnapshots/mount/mount.go b/typedapi/searchablesnapshots/mount/mount.go index 6db2413a33..966531482f 100644 --- a/typedapi/searchablesnapshots/mount/mount.go +++ b/typedapi/searchablesnapshots/mount/mount.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Mount a snapshot as a searchable index. package mount @@ -360,6 +360,50 @@ func (r *Mount) Storage(storage string) *Mount { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Mount) ErrorTrace(errortrace bool) *Mount { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Mount) FilterPath(filterpaths ...string) *Mount { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Mount) Human(human bool) *Mount { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Mount) Pretty(pretty bool) *Mount { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: ignore_index_settings func (r *Mount) IgnoreIndexSettings(ignoreindexsettings ...string) *Mount { r.req.IgnoreIndexSettings = ignoreindexsettings diff --git a/typedapi/searchablesnapshots/mount/request.go b/typedapi/searchablesnapshots/mount/request.go index 87c7ecf730..3d9c5fd5ac 100644 --- a/typedapi/searchablesnapshots/mount/request.go +++ b/typedapi/searchablesnapshots/mount/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package mount @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package mount // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/searchable_snapshots/mount/SearchableSnapshotsMountRequest.ts#L26-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/searchable_snapshots/mount/SearchableSnapshotsMountRequest.ts#L26-L49 type Request struct { IgnoreIndexSettings []string `json:"ignore_index_settings,omitempty"` Index string `json:"index"` @@ -43,6 +43,7 @@ func NewRequest() *Request { r := &Request{ IndexSettings: make(map[string]json.RawMessage, 0), } + return r } diff --git a/typedapi/searchablesnapshots/mount/response.go b/typedapi/searchablesnapshots/mount/response.go index 6bf5f40584..2b7a200dee 100644 --- a/typedapi/searchablesnapshots/mount/response.go +++ b/typedapi/searchablesnapshots/mount/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
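Mount follows the same pattern; its required repository and snapshot path parameters are assumed here to come from the constructor in the order of the REST path, and body fields such as the index to mount are set through the generated fluent setters. A sketch under the same assumptions as above, with placeholder repository, snapshot and index names:

    func mountSnapshot(ctx context.Context, es *elasticsearch.TypedClient) error {
        res, err := es.SearchableSnapshots.Mount("my-repository", "my-snapshot").
            Index("my-index").       // index inside the snapshot to mount (body field)
            Storage("shared_cache"). // existing query parameter
            Pretty(true).            // new common parameter
            Do(ctx)
        if err != nil {
            return err
        }
        log.Printf("mount response: %+v", res)
        return nil
    }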
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package mount @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package mount // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/searchable_snapshots/mount/SearchableSnapshotsMountResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/searchable_snapshots/mount/SearchableSnapshotsMountResponse.ts#L22-L26 type Response struct { Snapshot types.MountedSnapshot `json:"snapshot"` } diff --git a/typedapi/searchablesnapshots/stats/response.go b/typedapi/searchablesnapshots/stats/response.go index 43e505c705..990bb1f4f8 100644 --- a/typedapi/searchablesnapshots/stats/response.go +++ b/typedapi/searchablesnapshots/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/searchable_snapshots/stats/SearchableSnapshotsStatsResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/searchable_snapshots/stats/SearchableSnapshotsStatsResponse.ts#L22-L27 type Response struct { Stats json.RawMessage `json:"stats,omitempty"` Total json.RawMessage `json:"total,omitempty"` diff --git a/typedapi/searchablesnapshots/stats/stats.go b/typedapi/searchablesnapshots/stats/stats.go index 9800b9c297..bd706f95b9 100644 --- a/typedapi/searchablesnapshots/stats/stats.go +++ b/typedapi/searchablesnapshots/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieve shard-level statistics about searchable snapshots. package stats @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -268,7 +268,7 @@ func (r Stats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -312,3 +312,47 @@ func (r *Stats) Level(level statslevel.StatsLevel) *Stats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Stats) ErrorTrace(errortrace bool) *Stats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Stats) FilterPath(filterpaths ...string) *Stats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Stats) Human(human bool) *Stats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Stats) Pretty(pretty bool) *Stats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/searchapplication/delete/delete.go b/typedapi/searchapplication/delete/delete.go index e9037bd0a0..207cc46ec4 100644 --- a/typedapi/searchapplication/delete/delete.go +++ b/typedapi/searchapplication/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes a search application. package delete @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -262,7 +262,7 @@ func (r Delete) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +298,47 @@ func (r *Delete) _name(name string) *Delete { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Delete) ErrorTrace(errortrace bool) *Delete { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Delete) FilterPath(filterpaths ...string) *Delete { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Delete) Human(human bool) *Delete { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *Delete) Pretty(pretty bool) *Delete { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/searchapplication/delete/response.go b/typedapi/searchapplication/delete/response.go index 18972b7601..5265995523 100644 --- a/typedapi/searchapplication/delete/response.go +++ b/typedapi/searchapplication/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/delete/SearchApplicationsDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/delete/SearchApplicationsDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/searchapplication/deletebehavioralanalytics/delete_behavioral_analytics.go b/typedapi/searchapplication/deletebehavioralanalytics/delete_behavioral_analytics.go index 5054848bf4..2ef2d0759c 100644 --- a/typedapi/searchapplication/deletebehavioralanalytics/delete_behavioral_analytics.go +++ b/typedapi/searchapplication/deletebehavioralanalytics/delete_behavioral_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Delete a behavioral analytics collection. package deletebehavioralanalytics @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -262,7 +262,7 @@ func (r DeleteBehavioralAnalytics) IsSuccess(providedCtx context.Context) (bool, if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +298,47 @@ func (r *DeleteBehavioralAnalytics) _name(name string) *DeleteBehavioralAnalytic return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteBehavioralAnalytics) ErrorTrace(errortrace bool) *DeleteBehavioralAnalytics { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteBehavioralAnalytics) FilterPath(filterpaths ...string) *DeleteBehavioralAnalytics { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. 
When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteBehavioralAnalytics) Human(human bool) *DeleteBehavioralAnalytics { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteBehavioralAnalytics) Pretty(pretty bool) *DeleteBehavioralAnalytics { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/searchapplication/deletebehavioralanalytics/response.go b/typedapi/searchapplication/deletebehavioralanalytics/response.go index 5647e2bc41..26795b14cb 100644 --- a/typedapi/searchapplication/deletebehavioralanalytics/response.go +++ b/typedapi/searchapplication/deletebehavioralanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletebehavioralanalytics // Response holds the response body struct for the package deletebehavioralanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/delete_behavioral_analytics/BehavioralAnalyticsDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/delete_behavioral_analytics/BehavioralAnalyticsDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/searchapplication/get/get.go b/typedapi/searchapplication/get/get.go index 963248ca62..27b7401100 100644 --- a/typedapi/searchapplication/get/get.go +++ b/typedapi/searchapplication/get/get.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns the details about a search application. +// Returns the details about a search application package get import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +76,7 @@ func NewGetFunc(tp elastictransport.Interface) NewGet { } } -// Returns the details about a search application. +// Returns the details about a search application // // https://www.elastic.co/guide/en/elasticsearch/reference/current/get-search-application.html func New(tp elastictransport.Interface) *Get { @@ -262,7 +262,7 @@ func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +298,47 @@ func (r *Get) _name(name string) *Get { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *Get) ErrorTrace(errortrace bool) *Get { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Get) FilterPath(filterpaths ...string) *Get { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Get) Human(human bool) *Get { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Get) Pretty(pretty bool) *Get { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/searchapplication/get/response.go b/typedapi/searchapplication/get/response.go index 999ff08044..556543605c 100644 --- a/typedapi/searchapplication/get/response.go +++ b/typedapi/searchapplication/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/get/SearchApplicationsGetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/get/SearchApplicationsGetResponse.ts#L22-L24 type Response struct { // AnalyticsCollectionName Analytics collection associated to the Search Application. diff --git a/typedapi/searchapplication/getbehavioralanalytics/get_behavioral_analytics.go b/typedapi/searchapplication/getbehavioralanalytics/get_behavioral_analytics.go index 9e48ab08ed..d8769e8251 100644 --- a/typedapi/searchapplication/getbehavioralanalytics/get_behavioral_analytics.go +++ b/typedapi/searchapplication/getbehavioralanalytics/get_behavioral_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns the existing behavioral analytics collections. 
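For search applications, Get exposes the same helpers, and FilterPath pairs naturally with it when only a couple of fields such as the analytics collection name are needed. A sketch with a placeholder application name and illustrative filter paths, under the same client assumptions as above:

    func showSearchApplication(ctx context.Context, es *elasticsearch.TypedClient) error {
        app, err := es.SearchApplication.Get("my-search-app").
            FilterPath("name", "analytics_collection_name"). // illustrative paths
            Do(ctx)
        if err != nil {
            return err
        }
        log.Printf("search application: %+v", app)
        return nil
    }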
package getbehavioralanalytics @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -267,7 +267,7 @@ func (r GetBehavioralAnalytics) IsSuccess(providedCtx context.Context) (bool, er if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -303,3 +303,47 @@ func (r *GetBehavioralAnalytics) Name(name string) *GetBehavioralAnalytics { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetBehavioralAnalytics) ErrorTrace(errortrace bool) *GetBehavioralAnalytics { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetBehavioralAnalytics) FilterPath(filterpaths ...string) *GetBehavioralAnalytics { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetBehavioralAnalytics) Human(human bool) *GetBehavioralAnalytics { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetBehavioralAnalytics) Pretty(pretty bool) *GetBehavioralAnalytics { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/searchapplication/getbehavioralanalytics/response.go b/typedapi/searchapplication/getbehavioralanalytics/response.go index 3709e34609..33b46abf68 100644 --- a/typedapi/searchapplication/getbehavioralanalytics/response.go +++ b/typedapi/searchapplication/getbehavioralanalytics/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getbehavioralanalytics @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getbehavioralanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/get_behavioral_analytics/BehavioralAnalyticsGetResponse.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/get_behavioral_analytics/BehavioralAnalyticsGetResponse.ts#L24-L27 type Response map[string]types.AnalyticsCollection diff --git a/typedapi/searchapplication/list/list.go b/typedapi/searchapplication/list/list.go index 8c50326d92..f5136e8b94 100644 --- a/typedapi/searchapplication/list/list.go +++ b/typedapi/searchapplication/list/list.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns the existing search applications. package list @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -249,7 +248,7 @@ func (r List) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +299,47 @@ func (r *List) Size(size int) *List { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *List) ErrorTrace(errortrace bool) *List { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *List) FilterPath(filterpaths ...string) *List { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *List) Human(human bool) *List { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *List) Pretty(pretty bool) *List { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/searchapplication/list/response.go b/typedapi/searchapplication/list/response.go index 8fa32c164c..cea5e4aec8 100644 --- a/typedapi/searchapplication/list/response.go +++ b/typedapi/searchapplication/list/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package list @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package list // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/list/SearchApplicationsListResponse.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/list/SearchApplicationsListResponse.ts#L24-L29 type Response struct { Count int64 `json:"count"` Results []types.SearchApplicationListItem `json:"results"` diff --git a/typedapi/searchapplication/put/put.go b/typedapi/searchapplication/put/put.go index b65b5a0577..442c59f990 100644 --- a/typedapi/searchapplication/put/put.go +++ b/typedapi/searchapplication/put/put.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates or updates a search application. package put @@ -91,6 +91,8 @@ func New(tp elastictransport.Interface) *Put { headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -322,6 +324,50 @@ func (r *Put) Create(create bool) *Put { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Put) ErrorTrace(errortrace bool) *Put { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Put) FilterPath(filterpaths ...string) *Put { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Put) Human(human bool) *Put { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Put) Pretty(pretty bool) *Put { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AnalyticsCollectionName Analytics collection associated to the Search Application. // API name: analytics_collection_name func (r *Put) AnalyticsCollectionName(name string) *Put { diff --git a/typedapi/searchapplication/put/request.go b/typedapi/searchapplication/put/request.go index 8abd0bdd71..d264df4f58 100644 --- a/typedapi/searchapplication/put/request.go +++ b/typedapi/searchapplication/put/request.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package put @@ -26,5 +26,12 @@ import ( // Request holds the request body struct for the package put // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/put/SearchApplicationsPutRequest.ts#L23-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/put/SearchApplicationsPutRequest.ts#L23-L48 type Request = types.SearchApplication + +// NewRequest returns a Request +func NewRequest() *Request { + r := types.NewSearchApplication() + + return r +} diff --git a/typedapi/searchapplication/put/response.go b/typedapi/searchapplication/put/response.go index 6de854c2b0..f466c689ee 100644 --- a/typedapi/searchapplication/put/response.go +++ b/typedapi/searchapplication/put/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package put @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package put // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/put/SearchApplicationsPutResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/put/SearchApplicationsPutResponse.ts#L22-L26 type Response struct { Result result.Result `json:"result"` } diff --git a/typedapi/searchapplication/putbehavioralanalytics/put_behavioral_analytics.go b/typedapi/searchapplication/putbehavioralanalytics/put_behavioral_analytics.go index a6ace5057b..8136008e01 100644 --- a/typedapi/searchapplication/putbehavioralanalytics/put_behavioral_analytics.go +++ b/typedapi/searchapplication/putbehavioralanalytics/put_behavioral_analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates a behavioral analytics collection. package putbehavioralanalytics @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -262,7 +262,7 @@ func (r PutBehavioralAnalytics) IsSuccess(providedCtx context.Context) (bool, er if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +298,47 @@ func (r *PutBehavioralAnalytics) _name(name string) *PutBehavioralAnalytics { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *PutBehavioralAnalytics) ErrorTrace(errortrace bool) *PutBehavioralAnalytics { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutBehavioralAnalytics) FilterPath(filterpaths ...string) *PutBehavioralAnalytics { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutBehavioralAnalytics) Human(human bool) *PutBehavioralAnalytics { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutBehavioralAnalytics) Pretty(pretty bool) *PutBehavioralAnalytics { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/searchapplication/putbehavioralanalytics/response.go b/typedapi/searchapplication/putbehavioralanalytics/response.go index d1f13dc008..48e08bddf5 100644 --- a/typedapi/searchapplication/putbehavioralanalytics/response.go +++ b/typedapi/searchapplication/putbehavioralanalytics/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putbehavioralanalytics // Response holds the response body struct for the package putbehavioralanalytics // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/put_behavioral_analytics/BehavioralAnalyticsPutResponse.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/put_behavioral_analytics/BehavioralAnalyticsPutResponse.ts#L24-L26 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/searchapplication/search/request.go b/typedapi/searchapplication/search/request.go index 0953c7ab64..201b987d75 100644 --- a/typedapi/searchapplication/search/request.go +++ b/typedapi/searchapplication/search/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
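For illustration: the hunks above add the same four query-string helpers (ErrorTrace, FilterPath, Human, Pretty) to the typed search-application builders, as they do for every other typed request in this diff. A minimal usage sketch, assuming an already-configured typed client; the address and API key below are placeholders and not part of this change:

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	// Assumed client configuration; replace with real connection details.
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"https://localhost:9200"},
		APIKey:    "REDACTED",
	})
	if err != nil {
		log.Fatal(err)
	}

	// The new helpers only set query-string parameters on the request, e.g.
	// ?pretty=true&human=true&error_trace=true&filter_path=count,results.name
	res, err := es.SearchApplication.List().
		Pretty(true).
		Human(true).
		ErrorTrace(true).
		FilterPath("count", "results.name").
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("search applications:", res.Count)
}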
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package search @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/search/SearchApplicationsSearchRequest.ts#L24-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/search/SearchApplicationsSearchRequest.ts#L24-L52 type Request struct { // Params Query parameters specific to this request, which will override any defaults @@ -40,6 +40,7 @@ func NewRequest() *Request { r := &Request{ Params: make(map[string]json.RawMessage, 0), } + return r } diff --git a/typedapi/searchapplication/search/response.go b/typedapi/searchapplication/search/response.go index d16fd64521..825666e3d8 100644 --- a/typedapi/searchapplication/search/response.go +++ b/typedapi/searchapplication/search/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package search @@ -34,7 +34,7 @@ import ( // Response holds the response body struct for the package search // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/search/SearchApplicationsSearchResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/search/SearchApplicationsSearchResponse.ts#L23-L25 type Response struct { Aggregations map[string]types.Aggregate `json:"aggregations,omitempty"` Clusters_ *types.ClusterStatistics `json:"_clusters,omitempty"` @@ -532,7 +532,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := types.NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -582,7 +582,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -592,7 +592,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -620,7 +620,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "max_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -636,7 +636,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "num_reduce_phases": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -714,7 +714,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: - o := make(map[string]interface{}, 0) + o := 
make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -724,7 +724,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Suggest") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -734,7 +734,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "terminated_early": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -748,7 +748,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "timed_out": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -762,7 +762,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/searchapplication/search/search.go b/typedapi/searchapplication/search/search.go index 602a2ddb5b..c2e946f316 100644 --- a/typedapi/searchapplication/search/search.go +++ b/typedapi/searchapplication/search/search.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Perform a search against a search application +// Perform a search against a search application. package search import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +81,7 @@ func NewSearchFunc(tp elastictransport.Interface) NewSearch { } } -// Perform a search against a search application +// Perform a search against a search application. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/search-application-search.html func New(tp elastictransport.Interface) *Search { @@ -260,6 +261,8 @@ func (r Search) Do(providedCtx context.Context) (*Response, error) { response := NewResponse() + r.TypedKeys(true) + res, err := r.Perform(ctx) if err != nil { if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { @@ -316,6 +319,59 @@ func (r *Search) _name(name string) *Search { return r } +// TypedKeys Determines whether aggregation names are prefixed by their respective types +// in the response. +// API name: typed_keys +func (r *Search) TypedKeys(typedkeys bool) *Search { + r.values.Set("typed_keys", strconv.FormatBool(typedkeys)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Search) ErrorTrace(errortrace bool) *Search { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Search) FilterPath(filterpaths ...string) *Search { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Search) Human(human bool) *Search { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Search) Pretty(pretty bool) *Search { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Params Query parameters specific to this request, which will override any defaults // specified in the template. // API name: params diff --git a/typedapi/security/activateuserprofile/activate_user_profile.go b/typedapi/security/activateuserprofile/activate_user_profile.go index 4a26350216..380c30d1ae 100644 --- a/typedapi/security/activateuserprofile/activate_user_profile.go +++ b/typedapi/security/activateuserprofile/activate_user_profile.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates or updates the user profile on behalf of another user. +// Creates or updates a user profile on behalf of another user. package activateuserprofile import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -73,7 +74,7 @@ func NewActivateUserProfileFunc(tp elastictransport.Interface) NewActivateUserPr } } -// Creates or updates the user profile on behalf of another user. +// Creates or updates a user profile on behalf of another user. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-activate-user-profile.html func New(tp elastictransport.Interface) *ActivateUserProfile { @@ -294,6 +295,50 @@ func (r *ActivateUserProfile) Header(key, value string) *ActivateUserProfile { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ActivateUserProfile) ErrorTrace(errortrace bool) *ActivateUserProfile { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ActivateUserProfile) FilterPath(filterpaths ...string) *ActivateUserProfile { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ActivateUserProfile) Human(human bool) *ActivateUserProfile { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *ActivateUserProfile) Pretty(pretty bool) *ActivateUserProfile { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: access_token func (r *ActivateUserProfile) AccessToken(accesstoken string) *ActivateUserProfile { diff --git a/typedapi/security/activateuserprofile/request.go b/typedapi/security/activateuserprofile/request.go index bb326d81a3..58024aed9e 100644 --- a/typedapi/security/activateuserprofile/request.go +++ b/typedapi/security/activateuserprofile/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package activateuserprofile @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package activateuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/activate_user_profile/Request.ts#L23-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/activate_user_profile/Request.ts#L23-L37 type Request struct { AccessToken *string `json:"access_token,omitempty"` GrantType granttype.GrantType `json:"grant_type"` @@ -40,6 +40,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/security/activateuserprofile/response.go b/typedapi/security/activateuserprofile/response.go index 35bf143c5b..edbda0c25c 100644 --- a/typedapi/security/activateuserprofile/response.go +++ b/typedapi/security/activateuserprofile/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package activateuserprofile @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package activateuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/activate_user_profile/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/activate_user_profile/Response.ts#L22-L24 type Response struct { Data map[string]json.RawMessage `json:"data"` Doc_ types.UserProfileHitMetadata `json:"_doc"` diff --git a/typedapi/security/authenticate/authenticate.go b/typedapi/security/authenticate/authenticate.go index 71fd8c8a0c..e671f0b58b 100644 --- a/typedapi/security/authenticate/authenticate.go +++ b/typedapi/security/authenticate/authenticate.go @@ -16,10 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Enables authentication as a user and retrieve information about the -// authenticated user. 
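For illustration: Search.Do now calls r.TypedKeys(true) before performing the request, so aggregation keys come back type-prefixed and the UnmarshalJSON shown above can decode them into concrete aggregate types. A sketch of the typed search-application search, assuming a typed client with local defaults, an application named "my-app", and a template parameter called "query_string"; both names are illustrative, not taken from this diff:

package main

import (
	"context"
	"encoding/json"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/searchapplication/search"
)

func main() {
	// Assumed local defaults; replace with real connection details.
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{})
	if err != nil {
		log.Fatal(err)
	}

	// Template parameter value, JSON-encoded for the raw-message map.
	query, _ := json.Marshal("laptop")

	res, err := es.SearchApplication.Search("my-app").
		Request(&search.Request{
			// Params override any defaults declared in the application's template.
			Params: map[string]json.RawMessage{"query_string": query},
		}).
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}

	fmt.Printf("took %dms, %d aggregations decoded\n", res.Took, len(res.Aggregations))
}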
+// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Enables you to submit a request with a basic auth header to authenticate a +// user and retrieve information about the authenticated user. +// A successful call returns a JSON structure that shows user information such +// as their username, the roles that are assigned to the user, any assigned +// metadata, and information about the realms that authenticated and authorized +// the user. +// If the user cannot be authenticated, this API returns a 401 status code. package authenticate import ( @@ -28,9 +33,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -69,8 +74,13 @@ func NewAuthenticateFunc(tp elastictransport.Interface) NewAuthenticate { } } -// Enables authentication as a user and retrieve information about the -// authenticated user. +// Enables you to submit a request with a basic auth header to authenticate a +// user and retrieve information about the authenticated user. +// A successful call returns a JSON structure that shows user information such +// as their username, the roles that are assigned to the user, any assigned +// metadata, and information about the realms that authenticated and authorized +// the user. +// If the user cannot be authenticated, this API returns a 401 status code. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-authenticate.html func New(tp elastictransport.Interface) *Authenticate { @@ -250,7 +260,7 @@ func (r Authenticate) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -277,3 +287,47 @@ func (r *Authenticate) Header(key, value string) *Authenticate { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Authenticate) ErrorTrace(errortrace bool) *Authenticate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Authenticate) FilterPath(filterpaths ...string) *Authenticate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Authenticate) Human(human bool) *Authenticate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *Authenticate) Pretty(pretty bool) *Authenticate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/authenticate/response.go b/typedapi/security/authenticate/response.go index 8b31d85e1b..eb45fa64cc 100644 --- a/typedapi/security/authenticate/response.go +++ b/typedapi/security/authenticate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package authenticate @@ -26,14 +26,14 @@ import ( // Response holds the response body struct for the package authenticate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/authenticate/SecurityAuthenticateResponse.ts#L25-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/authenticate/SecurityAuthenticateResponse.ts#L25-L43 type Response struct { ApiKey *types.ApiKey `json:"api_key,omitempty"` AuthenticationRealm types.RealmInfo `json:"authentication_realm"` AuthenticationType string `json:"authentication_type"` - Email string `json:"email,omitempty"` + Email *string `json:"email,omitempty"` Enabled bool `json:"enabled"` - FullName string `json:"full_name,omitempty"` + FullName *string `json:"full_name,omitempty"` LookupRealm types.RealmInfo `json:"lookup_realm"` Metadata types.Metadata `json:"metadata"` Roles []string `json:"roles"` diff --git a/typedapi/security/bulkupdateapikeys/bulk_update_api_keys.go b/typedapi/security/bulkupdateapikeys/bulk_update_api_keys.go index c64c6a9d46..ed7db71e7e 100644 --- a/typedapi/security/bulkupdateapikeys/bulk_update_api_keys.go +++ b/typedapi/security/bulkupdateapikeys/bulk_update_api_keys.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Updates the attributes of multiple existing API keys. package bulkupdateapikeys @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strings" @@ -205,7 +204,7 @@ func (r BulkUpdateApiKeys) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err diff --git a/typedapi/security/changepassword/change_password.go b/typedapi/security/changepassword/change_password.go index 1190b37759..6513853673 100644 --- a/typedapi/security/changepassword/change_password.go +++ b/typedapi/security/changepassword/change_password.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Changes the passwords of users in the native realm and built-in users. 
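For illustration: with Email and FullName now *string in the authenticate response, callers should nil-check before dereferencing instead of comparing against "". A short sketch, assuming an already-configured typed client passed in as es (the package name is hypothetical):

package examples

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// whoAmI prints details about the authenticated user.
func whoAmI(es *elasticsearch.TypedClient) error {
	who, err := es.Security.Authenticate().Do(context.Background())
	if err != nil {
		return err
	}
	fullName := "(not set)"
	if who.FullName != nil { // optional fields are now pointers; nil means the value was omitted
		fullName = *who.FullName
	}
	fmt.Printf("enabled=%v full_name=%q roles=%v\n", who.Enabled, fullName, who.Roles)
	return nil
}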
package changepassword @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -335,6 +336,50 @@ func (r *ChangePassword) Refresh(refresh refresh.Refresh) *ChangePassword { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ChangePassword) ErrorTrace(errortrace bool) *ChangePassword { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ChangePassword) FilterPath(filterpaths ...string) *ChangePassword { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ChangePassword) Human(human bool) *ChangePassword { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ChangePassword) Pretty(pretty bool) *ChangePassword { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Password The new password value. Passwords must be at least 6 characters long. // API name: password func (r *ChangePassword) Password(password string) *ChangePassword { diff --git a/typedapi/security/changepassword/request.go b/typedapi/security/changepassword/request.go index f11cece0a4..9816d914f2 100644 --- a/typedapi/security/changepassword/request.go +++ b/typedapi/security/changepassword/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package changepassword @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package changepassword // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/change_password/SecurityChangePasswordRequest.ts#L23-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/change_password/SecurityChangePasswordRequest.ts#L23-L51 type Request struct { // Password The new password value. Passwords must be at least 6 characters long. @@ -47,6 +47,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/security/changepassword/response.go b/typedapi/security/changepassword/response.go index 6483ab7f78..53e453fd1a 100644 --- a/typedapi/security/changepassword/response.go +++ b/typedapi/security/changepassword/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package changepassword // Response holds the response body struct for the package changepassword // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/change_password/SecurityChangePasswordResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/change_password/SecurityChangePasswordResponse.ts#L20-L22 type Response struct { } diff --git a/typedapi/security/clearapikeycache/clear_api_key_cache.go b/typedapi/security/clearapikeycache/clear_api_key_cache.go index e81cfb0327..949d701880 100644 --- a/typedapi/security/clearapikeycache/clear_api_key_cache.go +++ b/typedapi/security/clearapikeycache/clear_api_key_cache.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Clear a subset or all entries from the API key cache. +// Evicts a subset of all entries from the API key cache. +// The cache is also automatically cleared on state changes of the security +// index. package clearapikeycache import ( @@ -27,9 +29,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +78,9 @@ func NewClearApiKeyCacheFunc(tp elastictransport.Interface) NewClearApiKeyCache } } -// Clear a subset or all entries from the API key cache. +// Evicts a subset of all entries from the API key cache. +// The cache is also automatically cleared on state changes of the security +// index. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-clear-api-key-cache.html func New(tp elastictransport.Interface) *ClearApiKeyCache { @@ -264,7 +268,7 @@ func (r ClearApiKeyCache) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -302,3 +306,47 @@ func (r *ClearApiKeyCache) _ids(ids string) *ClearApiKeyCache { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ClearApiKeyCache) ErrorTrace(errortrace bool) *ClearApiKeyCache { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ClearApiKeyCache) FilterPath(filterpaths ...string) *ClearApiKeyCache { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. 
This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ClearApiKeyCache) Human(human bool) *ClearApiKeyCache { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ClearApiKeyCache) Pretty(pretty bool) *ClearApiKeyCache { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/clearapikeycache/response.go b/typedapi/security/clearapikeycache/response.go index 7963ce6aeb..45d6d55a96 100644 --- a/typedapi/security/clearapikeycache/response.go +++ b/typedapi/security/clearapikeycache/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clearapikeycache @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearapikeycache // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/clear_api_key_cache/SecurityClearApiKeyCacheResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/clear_api_key_cache/SecurityClearApiKeyCacheResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` NodeStats types.NodeStatistics `json:"_nodes"` diff --git a/typedapi/security/clearcachedprivileges/clear_cached_privileges.go b/typedapi/security/clearcachedprivileges/clear_cached_privileges.go index 3aa788ff58..44e8e841cb 100644 --- a/typedapi/security/clearcachedprivileges/clear_cached_privileges.go +++ b/typedapi/security/clearcachedprivileges/clear_cached_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Evicts application privileges from the native application privileges cache. package clearcachedprivileges @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r ClearCachedPrivileges) IsSuccess(providedCtx context.Context) (bool, err if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +300,47 @@ func (r *ClearCachedPrivileges) _application(application string) *ClearCachedPri return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ClearCachedPrivileges) ErrorTrace(errortrace bool) *ClearCachedPrivileges { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *ClearCachedPrivileges) FilterPath(filterpaths ...string) *ClearCachedPrivileges { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ClearCachedPrivileges) Human(human bool) *ClearCachedPrivileges { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ClearCachedPrivileges) Pretty(pretty bool) *ClearCachedPrivileges { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/clearcachedprivileges/response.go b/typedapi/security/clearcachedprivileges/response.go index 4da37f129f..d790c94161 100644 --- a/typedapi/security/clearcachedprivileges/response.go +++ b/typedapi/security/clearcachedprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clearcachedprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcachedprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/clear_cached_privileges/SecurityClearCachedPrivilegesResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/clear_cached_privileges/SecurityClearCachedPrivilegesResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` NodeStats types.NodeStatistics `json:"_nodes"` diff --git a/typedapi/security/clearcachedrealms/clear_cached_realms.go b/typedapi/security/clearcachedrealms/clear_cached_realms.go index f432c84bb6..9cac9a1ce5 100644 --- a/typedapi/security/clearcachedrealms/clear_cached_realms.go +++ b/typedapi/security/clearcachedrealms/clear_cached_realms.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Evicts users from the user cache. Can completely clear the cache or evict // specific users. 
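For illustration: a sketch of evicting entries from the API key cache through the typed client, assuming es is an already-configured typed client and that the namespace method takes the comma-separated ID list as its argument; the IDs and package name are placeholders:

package examples

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// evictAPIKeys clears specific entries from the API key cache.
func evictAPIKeys(es *elasticsearch.TypedClient, keyIDs string) error {
	// keyIDs is a comma-separated list of API key IDs, or "*" for all entries.
	res, err := es.Security.ClearApiKeyCache(keyIDs).Do(context.Background())
	if err != nil {
		return err
	}
	fmt.Println("cleared API key cache on cluster:", res.ClusterName)
	return nil
}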
@@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -266,7 +266,7 @@ func (r ClearCachedRealms) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -314,3 +314,47 @@ func (r *ClearCachedRealms) Usernames(usernames ...string) *ClearCachedRealms { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ClearCachedRealms) ErrorTrace(errortrace bool) *ClearCachedRealms { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ClearCachedRealms) FilterPath(filterpaths ...string) *ClearCachedRealms { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ClearCachedRealms) Human(human bool) *ClearCachedRealms { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ClearCachedRealms) Pretty(pretty bool) *ClearCachedRealms { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/clearcachedrealms/response.go b/typedapi/security/clearcachedrealms/response.go index fc375528ca..222c927620 100644 --- a/typedapi/security/clearcachedrealms/response.go +++ b/typedapi/security/clearcachedrealms/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clearcachedrealms @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcachedrealms // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/clear_cached_realms/SecurityClearCachedRealmsResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/clear_cached_realms/SecurityClearCachedRealmsResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` NodeStats types.NodeStatistics `json:"_nodes"` diff --git a/typedapi/security/clearcachedroles/clear_cached_roles.go b/typedapi/security/clearcachedroles/clear_cached_roles.go index 465dbf7b19..108ac783c9 100644 --- a/typedapi/security/clearcachedroles/clear_cached_roles.go +++ b/typedapi/security/clearcachedroles/clear_cached_roles.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Evicts roles from the native role cache. package clearcachedroles @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r ClearCachedRoles) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +300,47 @@ func (r *ClearCachedRoles) _name(name string) *ClearCachedRoles { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ClearCachedRoles) ErrorTrace(errortrace bool) *ClearCachedRoles { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ClearCachedRoles) FilterPath(filterpaths ...string) *ClearCachedRoles { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ClearCachedRoles) Human(human bool) *ClearCachedRoles { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ClearCachedRoles) Pretty(pretty bool) *ClearCachedRoles { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/clearcachedroles/response.go b/typedapi/security/clearcachedroles/response.go index a7c40bd523..a486ac22ac 100644 --- a/typedapi/security/clearcachedroles/response.go +++ b/typedapi/security/clearcachedroles/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clearcachedroles @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcachedroles // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/clear_cached_roles/ClearCachedRolesResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/clear_cached_roles/ClearCachedRolesResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` NodeStats types.NodeStatistics `json:"_nodes"` diff --git a/typedapi/security/clearcachedservicetokens/clear_cached_service_tokens.go b/typedapi/security/clearcachedservicetokens/clear_cached_service_tokens.go index 1420aa2295..28ca93ebe5 100644 --- a/typedapi/security/clearcachedservicetokens/clear_cached_service_tokens.go +++ b/typedapi/security/clearcachedservicetokens/clear_cached_service_tokens.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Evicts tokens from the service account token caches. package clearcachedservicetokens @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -290,7 +290,7 @@ func (r ClearCachedServiceTokens) IsSuccess(providedCtx context.Context) (bool, if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -344,3 +344,47 @@ func (r *ClearCachedServiceTokens) _name(name string) *ClearCachedServiceTokens return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ClearCachedServiceTokens) ErrorTrace(errortrace bool) *ClearCachedServiceTokens { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ClearCachedServiceTokens) FilterPath(filterpaths ...string) *ClearCachedServiceTokens { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ClearCachedServiceTokens) Human(human bool) *ClearCachedServiceTokens { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *ClearCachedServiceTokens) Pretty(pretty bool) *ClearCachedServiceTokens { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/clearcachedservicetokens/response.go b/typedapi/security/clearcachedservicetokens/response.go index a186649b29..5da8e83085 100644 --- a/typedapi/security/clearcachedservicetokens/response.go +++ b/typedapi/security/clearcachedservicetokens/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clearcachedservicetokens @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package clearcachedservicetokens // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/clear_cached_service_tokens/ClearCachedServiceTokensResponse.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/clear_cached_service_tokens/ClearCachedServiceTokensResponse.ts#L25-L32 type Response struct { ClusterName string `json:"cluster_name"` NodeStats types.NodeStatistics `json:"_nodes"` diff --git a/typedapi/security/createapikey/create_api_key.go b/typedapi/security/createapikey/create_api_key.go index 60d948c436..4783b355b7 100644 --- a/typedapi/security/createapikey/create_api_key.go +++ b/typedapi/security/createapikey/create_api_key.go @@ -16,9 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates an API key for access without requiring basic authentication. +// A successful request returns a JSON structure that contains the API key, its +// unique id, and its name. +// If applicable, it also returns expiration information for the API key in +// milliseconds. +// NOTE: By default, API keys never expire. You can specify expiration +// information when you create the API keys. package createapikey import ( @@ -30,6 +36,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -74,6 +81,12 @@ func NewCreateApiKeyFunc(tp elastictransport.Interface) NewCreateApiKey { } // Creates an API key for access without requiring basic authentication. +// A successful request returns a JSON structure that contains the API key, its +// unique id, and its name. +// If applicable, it also returns expiration information for the API key in +// milliseconds. +// NOTE: By default, API keys never expire. You can specify expiration +// information when you create the API keys. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-api-key.html func New(tp elastictransport.Interface) *CreateApiKey { @@ -302,6 +315,50 @@ func (r *CreateApiKey) Refresh(refresh refresh.Refresh) *CreateApiKey { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
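For illustration: the expanded CreateApiKey documentation notes that keys never expire unless an expiration is supplied. A sketch using the Expiration setter added above; the key name and lifetime are placeholders, and the Name setter and Id response field are assumed to follow the same generated-builder pattern as the rest of this diff:

package examples

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// newExpiringKey creates an API key that expires after one day.
func newExpiringKey(es *elasticsearch.TypedClient) error {
	res, err := es.Security.CreateApiKey().
		Name("example-key").
		Expiration("1d"). // by default API keys never expire; set a lifetime explicitly
		Do(context.Background())
	if err != nil {
		return err
	}
	fmt.Println("created API key with id:", res.Id)
	return nil
}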
+// API name: error_trace +func (r *CreateApiKey) ErrorTrace(errortrace bool) *CreateApiKey { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *CreateApiKey) FilterPath(filterpaths ...string) *CreateApiKey { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *CreateApiKey) Human(human bool) *CreateApiKey { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *CreateApiKey) Pretty(pretty bool) *CreateApiKey { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Expiration Expiration time for the API key. By default, API keys never expire. // API name: expiration func (r *CreateApiKey) Expiration(duration types.Duration) *CreateApiKey { diff --git a/typedapi/security/createapikey/request.go b/typedapi/security/createapikey/request.go index 89a962fe9b..85c40dfdd4 100644 --- a/typedapi/security/createapikey/request.go +++ b/typedapi/security/createapikey/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package createapikey @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package createapikey // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/create_api_key/SecurityCreateApiKeyRequest.ts#L26-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/create_api_key/SecurityCreateApiKeyRequest.ts#L26-L58 type Request struct { // Expiration Expiration time for the API key. By default, API keys never expire. @@ -59,6 +59,7 @@ func NewRequest() *Request { r := &Request{ RoleDescriptors: make(map[string]types.RoleDescriptor, 0), } + return r } diff --git a/typedapi/security/createapikey/response.go b/typedapi/security/createapikey/response.go index fdf66cd6a2..dc2405b813 100644 --- a/typedapi/security/createapikey/response.go +++ b/typedapi/security/createapikey/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package createapikey // Response holds the response body struct for the package createapikey // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/create_api_key/SecurityCreateApiKeyResponse.ts#L23-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/create_api_key/SecurityCreateApiKeyResponse.ts#L23-L50 type Response struct { // ApiKey Generated API key. diff --git a/typedapi/security/createcrossclusterapikey/create_cross_cluster_api_key.go b/typedapi/security/createcrossclusterapikey/create_cross_cluster_api_key.go index db4e55ffe8..3e4e52d790 100644 --- a/typedapi/security/createcrossclusterapikey/create_cross_cluster_api_key.go +++ b/typedapi/security/createcrossclusterapikey/create_cross_cluster_api_key.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates a cross-cluster API key for API key based remote cluster access. package createcrossclusterapikey @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strings" @@ -205,7 +204,7 @@ func (r CreateCrossClusterApiKey) IsSuccess(providedCtx context.Context) (bool, if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err diff --git a/typedapi/security/createservicetoken/create_service_token.go b/typedapi/security/createservicetoken/create_service_token.go index 272cd8ad4c..53b55584cd 100644 --- a/typedapi/security/createservicetoken/create_service_token.go +++ b/typedapi/security/createservicetoken/create_service_token.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates a service account token for access without requiring basic +// Creates a service accounts token for access without requiring basic // authentication. package createservicetoken @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -86,7 +86,7 @@ func NewCreateServiceTokenFunc(tp elastictransport.Interface) NewCreateServiceTo } } -// Creates a service account token for access without requiring basic +// Creates a service accounts token for access without requiring basic // authentication. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-create-service-token.html @@ -312,7 +312,7 @@ func (r CreateServiceToken) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -376,3 +376,47 @@ func (r *CreateServiceToken) Refresh(refresh refresh.Refresh) *CreateServiceToke return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *CreateServiceToken) ErrorTrace(errortrace bool) *CreateServiceToken { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *CreateServiceToken) FilterPath(filterpaths ...string) *CreateServiceToken { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *CreateServiceToken) Human(human bool) *CreateServiceToken { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *CreateServiceToken) Pretty(pretty bool) *CreateServiceToken { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/createservicetoken/response.go b/typedapi/security/createservicetoken/response.go index 12df57ae80..0a090547d9 100644 --- a/typedapi/security/createservicetoken/response.go +++ b/typedapi/security/createservicetoken/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package createservicetoken @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package createservicetoken // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/create_service_token/CreateServiceTokenResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/create_service_token/CreateServiceTokenResponse.ts#L22-L27 type Response struct { Created bool `json:"created"` Token types.ServiceToken `json:"token"` diff --git a/typedapi/security/deleteprivileges/delete_privileges.go b/typedapi/security/deleteprivileges/delete_privileges.go index f5e9668061..ad29e7b02f 100644 --- a/typedapi/security/deleteprivileges/delete_privileges.go +++ b/typedapi/security/deleteprivileges/delete_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Removes application privileges. package deleteprivileges @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -274,7 +274,7 @@ func (r DeletePrivileges) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -329,3 +329,47 @@ func (r *DeletePrivileges) Refresh(refresh refresh.Refresh) *DeletePrivileges { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeletePrivileges) ErrorTrace(errortrace bool) *DeletePrivileges { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeletePrivileges) FilterPath(filterpaths ...string) *DeletePrivileges { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeletePrivileges) Human(human bool) *DeletePrivileges { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeletePrivileges) Pretty(pretty bool) *DeletePrivileges { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/deleteprivileges/response.go b/typedapi/security/deleteprivileges/response.go index 93d9034273..def9f4ed49 100644 --- a/typedapi/security/deleteprivileges/response.go +++ b/typedapi/security/deleteprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
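// ---------------------------------------------------------------------------
// Editor's sketch (not generated code): every ErrorTrace/FilterPath/Human/
// Pretty helper added throughout this diff only writes a query-string
// parameter, exactly as the generated method bodies above do. This standalone
// snippet reproduces that bookkeeping with the standard library so the
// resulting URL suffix is easy to see.
package main

import (
	"fmt"
	"net/url"
	"strconv"
	"strings"
)

func main() {
	values := url.Values{}
	values.Set("error_trace", strconv.FormatBool(false))
	values.Set("filter_path", strings.Join([]string{"found"}, ","))
	values.Set("human", strconv.FormatBool(true))
	values.Set("pretty", strconv.FormatBool(true))

	// Prints: error_trace=false&filter_path=found&human=true&pretty=true
	fmt.Println(values.Encode())
}
// ---------------------------------------------------------------------------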
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleteprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deleteprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/delete_privileges/SecurityDeletePrivilegesResponse.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/delete_privileges/SecurityDeletePrivilegesResponse.ts#L23-L26 type Response map[string]map[string]types.FoundStatus diff --git a/typedapi/security/deleterole/delete_role.go b/typedapi/security/deleterole/delete_role.go index ac97e87811..ea5b234030 100644 --- a/typedapi/security/deleterole/delete_role.go +++ b/typedapi/security/deleterole/delete_role.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Removes roles in the native realm. package deleterole @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -263,7 +263,7 @@ func (r DeleteRole) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -309,3 +309,47 @@ func (r *DeleteRole) Refresh(refresh refresh.Refresh) *DeleteRole { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteRole) ErrorTrace(errortrace bool) *DeleteRole { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteRole) FilterPath(filterpaths ...string) *DeleteRole { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteRole) Human(human bool) *DeleteRole { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *DeleteRole) Pretty(pretty bool) *DeleteRole { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/deleterole/response.go b/typedapi/security/deleterole/response.go index b60c08322c..e18401e299 100644 --- a/typedapi/security/deleterole/response.go +++ b/typedapi/security/deleterole/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleterole // Response holds the response body struct for the package deleterole // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/delete_role/SecurityDeleteRoleResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/delete_role/SecurityDeleteRoleResponse.ts#L20-L22 type Response struct { Found bool `json:"found"` } diff --git a/typedapi/security/deleterolemapping/delete_role_mapping.go b/typedapi/security/deleterolemapping/delete_role_mapping.go index c86d2c60b0..5fbaf3b60c 100644 --- a/typedapi/security/deleterolemapping/delete_role_mapping.go +++ b/typedapi/security/deleterolemapping/delete_role_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Removes role mappings. package deleterolemapping @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -263,7 +263,7 @@ func (r DeleteRoleMapping) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -309,3 +309,47 @@ func (r *DeleteRoleMapping) Refresh(refresh refresh.Refresh) *DeleteRoleMapping return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteRoleMapping) ErrorTrace(errortrace bool) *DeleteRoleMapping { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteRoleMapping) FilterPath(filterpaths ...string) *DeleteRoleMapping { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *DeleteRoleMapping) Human(human bool) *DeleteRoleMapping { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteRoleMapping) Pretty(pretty bool) *DeleteRoleMapping { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/deleterolemapping/response.go b/typedapi/security/deleterolemapping/response.go index 5b02d67340..a9fa7e4329 100644 --- a/typedapi/security/deleterolemapping/response.go +++ b/typedapi/security/deleterolemapping/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleterolemapping // Response holds the response body struct for the package deleterolemapping // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/delete_role_mapping/SecurityDeleteRoleMappingResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/delete_role_mapping/SecurityDeleteRoleMappingResponse.ts#L20-L22 type Response struct { Found bool `json:"found"` } diff --git a/typedapi/security/deleteservicetoken/delete_service_token.go b/typedapi/security/deleteservicetoken/delete_service_token.go index 16308ed5a3..e28962ffa9 100644 --- a/typedapi/security/deleteservicetoken/delete_service_token.go +++ b/typedapi/security/deleteservicetoken/delete_service_token.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes a service account token. package deleteservicetoken @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -289,7 +289,7 @@ func (r DeleteServiceToken) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -353,3 +353,47 @@ func (r *DeleteServiceToken) Refresh(refresh refresh.Refresh) *DeleteServiceToke return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteServiceToken) ErrorTrace(errortrace bool) *DeleteServiceToken { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *DeleteServiceToken) FilterPath(filterpaths ...string) *DeleteServiceToken { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteServiceToken) Human(human bool) *DeleteServiceToken { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteServiceToken) Pretty(pretty bool) *DeleteServiceToken { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/deleteservicetoken/response.go b/typedapi/security/deleteservicetoken/response.go index 8e74657ee0..429ee0d3b0 100644 --- a/typedapi/security/deleteservicetoken/response.go +++ b/typedapi/security/deleteservicetoken/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleteservicetoken // Response holds the response body struct for the package deleteservicetoken // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/delete_service_token/DeleteServiceTokenResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/delete_service_token/DeleteServiceTokenResponse.ts#L20-L22 type Response struct { Found bool `json:"found"` } diff --git a/typedapi/security/deleteuser/delete_user.go b/typedapi/security/deleteuser/delete_user.go index 9bdc450e49..11751b84e3 100644 --- a/typedapi/security/deleteuser/delete_user.go +++ b/typedapi/security/deleteuser/delete_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes users from the native realm. package deleteuser @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -263,7 +263,7 @@ func (r DeleteUser) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -309,3 +309,47 @@ func (r *DeleteUser) Refresh(refresh refresh.Refresh) *DeleteUser { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *DeleteUser) ErrorTrace(errortrace bool) *DeleteUser { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteUser) FilterPath(filterpaths ...string) *DeleteUser { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteUser) Human(human bool) *DeleteUser { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteUser) Pretty(pretty bool) *DeleteUser { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/deleteuser/response.go b/typedapi/security/deleteuser/response.go index 09e272e7e3..1e2c3609d2 100644 --- a/typedapi/security/deleteuser/response.go +++ b/typedapi/security/deleteuser/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleteuser // Response holds the response body struct for the package deleteuser // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/delete_user/SecurityDeleteUserResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/delete_user/SecurityDeleteUserResponse.ts#L20-L22 type Response struct { Found bool `json:"found"` } diff --git a/typedapi/security/disableuser/disable_user.go b/typedapi/security/disableuser/disable_user.go index 17089e526d..c637d003dd 100644 --- a/typedapi/security/disableuser/disable_user.go +++ b/typedapi/security/disableuser/disable_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Disables users in the native realm. 
package disableuser @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -265,7 +265,7 @@ func (r DisableUser) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -311,3 +311,47 @@ func (r *DisableUser) Refresh(refresh refresh.Refresh) *DisableUser { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DisableUser) ErrorTrace(errortrace bool) *DisableUser { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DisableUser) FilterPath(filterpaths ...string) *DisableUser { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DisableUser) Human(human bool) *DisableUser { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DisableUser) Pretty(pretty bool) *DisableUser { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/disableuser/response.go b/typedapi/security/disableuser/response.go index 8104c868af..e2e0998e8d 100644 --- a/typedapi/security/disableuser/response.go +++ b/typedapi/security/disableuser/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package disableuser // Response holds the response body struct for the package disableuser // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/disable_user/SecurityDisableUserResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/disable_user/SecurityDisableUserResponse.ts#L20-L22 type Response struct { } diff --git a/typedapi/security/disableuserprofile/disable_user_profile.go b/typedapi/security/disableuserprofile/disable_user_profile.go index 0ae72b75d3..612ad6cdec 100644 --- a/typedapi/security/disableuserprofile/disable_user_profile.go +++ b/typedapi/security/disableuserprofile/disable_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
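// ---------------------------------------------------------------------------
// Editor's sketch (not generated code): the write endpoints touched in this
// diff (delete/disable/enable for users, roles, role mappings, ...) all share
// the same shape: an optional Refresh plus the new common parameters, with
// IsSuccess as a lightweight status check. Assumptions: the TypedClient
// exposes Security.DisableUser(username) as in the typed-API usage pattern,
// and the refresh enum lives under typedapi/types/enums/refresh; "jacknich"
// is a placeholder username.
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/refresh"
)

func disableUser(ctx context.Context, client *elasticsearch.TypedClient) error {
	ok, err := client.Security.DisableUser("jacknich"). // assumed namespace method
		Refresh(refresh.True).                      // assumed enum constant; apply the change immediately
		IsSuccess(ctx)                              // true for any 2xx response
	if err != nil {
		return err
	}
	fmt.Println("user disabled:", ok)
	return nil
}
// ---------------------------------------------------------------------------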
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Disables a user profile so it's not visible in user profile searches. package disableuserprofile @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -265,7 +265,7 @@ func (r DisableUserProfile) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -312,3 +312,47 @@ func (r *DisableUserProfile) Refresh(refresh refresh.Refresh) *DisableUserProfil return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DisableUserProfile) ErrorTrace(errortrace bool) *DisableUserProfile { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DisableUserProfile) FilterPath(filterpaths ...string) *DisableUserProfile { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DisableUserProfile) Human(human bool) *DisableUserProfile { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DisableUserProfile) Pretty(pretty bool) *DisableUserProfile { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/disableuserprofile/response.go b/typedapi/security/disableuserprofile/response.go index 783bd2bc3a..060b94d26b 100644 --- a/typedapi/security/disableuserprofile/response.go +++ b/typedapi/security/disableuserprofile/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package disableuserprofile // Response holds the response body struct for the package disableuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/disable_user_profile/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/disable_user_profile/Response.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/security/enableuser/enable_user.go b/typedapi/security/enableuser/enable_user.go index 7ad63cac5e..7204f07580 100644 --- a/typedapi/security/enableuser/enable_user.go +++ b/typedapi/security/enableuser/enable_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Enables users in the native realm. package enableuser @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -265,7 +265,7 @@ func (r EnableUser) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -311,3 +311,47 @@ func (r *EnableUser) Refresh(refresh refresh.Refresh) *EnableUser { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *EnableUser) ErrorTrace(errortrace bool) *EnableUser { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *EnableUser) FilterPath(filterpaths ...string) *EnableUser { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *EnableUser) Human(human bool) *EnableUser { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *EnableUser) Pretty(pretty bool) *EnableUser { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/enableuser/response.go b/typedapi/security/enableuser/response.go index d4c9bdb324..5654875d20 100644 --- a/typedapi/security/enableuser/response.go +++ b/typedapi/security/enableuser/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package enableuser // Response holds the response body struct for the package enableuser // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/enable_user/SecurityEnableUserResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/enable_user/SecurityEnableUserResponse.ts#L20-L22 type Response struct { } diff --git a/typedapi/security/enableuserprofile/enable_user_profile.go b/typedapi/security/enableuserprofile/enable_user_profile.go index d0d39a3bf4..4479b9f2d7 100644 --- a/typedapi/security/enableuserprofile/enable_user_profile.go +++ b/typedapi/security/enableuserprofile/enable_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Enables a user profile so it's visible in user profile searches. package enableuserprofile @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -265,7 +265,7 @@ func (r EnableUserProfile) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -312,3 +312,47 @@ func (r *EnableUserProfile) Refresh(refresh refresh.Refresh) *EnableUserProfile return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *EnableUserProfile) ErrorTrace(errortrace bool) *EnableUserProfile { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *EnableUserProfile) FilterPath(filterpaths ...string) *EnableUserProfile { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *EnableUserProfile) Human(human bool) *EnableUserProfile { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *EnableUserProfile) Pretty(pretty bool) *EnableUserProfile { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/enableuserprofile/response.go b/typedapi/security/enableuserprofile/response.go index c5e4ecfa82..2c581eb496 100644 --- a/typedapi/security/enableuserprofile/response.go +++ b/typedapi/security/enableuserprofile/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package enableuserprofile // Response holds the response body struct for the package enableuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/enable_user_profile/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/enable_user_profile/Response.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/security/enrollkibana/enroll_kibana.go b/typedapi/security/enrollkibana/enroll_kibana.go index 0f846ae6df..ac830d6eb5 100644 --- a/typedapi/security/enrollkibana/enroll_kibana.go +++ b/typedapi/security/enrollkibana/enroll_kibana.go @@ -16,10 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Allows a kibana instance to configure itself to communicate with a secured -// elasticsearch cluster. +// Enables a Kibana instance to configure itself for communication with a +// secured Elasticsearch cluster. package enrollkibana import ( @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -69,8 +69,8 @@ func NewEnrollKibanaFunc(tp elastictransport.Interface) NewEnrollKibana { } } -// Allows a kibana instance to configure itself to communicate with a secured -// elasticsearch cluster. +// Enables a Kibana instance to configure itself for communication with a +// secured Elasticsearch cluster. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-kibana-enrollment.html func New(tp elastictransport.Interface) *EnrollKibana { @@ -258,7 +258,7 @@ func (r EnrollKibana) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -285,3 +285,47 @@ func (r *EnrollKibana) Header(key, value string) *EnrollKibana { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *EnrollKibana) ErrorTrace(errortrace bool) *EnrollKibana { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *EnrollKibana) FilterPath(filterpaths ...string) *EnrollKibana { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *EnrollKibana) Human(human bool) *EnrollKibana { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *EnrollKibana) Pretty(pretty bool) *EnrollKibana { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/enrollkibana/response.go b/typedapi/security/enrollkibana/response.go index 7840d492c5..8ac326ff6c 100644 --- a/typedapi/security/enrollkibana/response.go +++ b/typedapi/security/enrollkibana/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package enrollkibana @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package enrollkibana // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/enroll_kibana/Response.ts#L20-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/enroll_kibana/Response.ts#L20-L25 type Response struct { HttpCa string `json:"http_ca"` Token types.KibanaToken `json:"token"` diff --git a/typedapi/security/enrollnode/enroll_node.go b/typedapi/security/enrollnode/enroll_node.go index 55aeba1552..e2c7b4d8a9 100644 --- a/typedapi/security/enrollnode/enroll_node.go +++ b/typedapi/security/enrollnode/enroll_node.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Allows a new node to enroll to an existing cluster with security enabled. +// Allows a new node to join an existing cluster with security features enabled. package enrollnode import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -68,7 +68,7 @@ func NewEnrollNodeFunc(tp elastictransport.Interface) NewEnrollNode { } } -// Allows a new node to enroll to an existing cluster with security enabled. +// Allows a new node to join an existing cluster with security features enabled. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-node-enrollment.html func New(tp elastictransport.Interface) *EnrollNode { @@ -256,7 +256,7 @@ func (r EnrollNode) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -283,3 +283,47 @@ func (r *EnrollNode) Header(key, value string) *EnrollNode { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *EnrollNode) ErrorTrace(errortrace bool) *EnrollNode { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *EnrollNode) FilterPath(filterpaths ...string) *EnrollNode { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *EnrollNode) Human(human bool) *EnrollNode { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *EnrollNode) Pretty(pretty bool) *EnrollNode { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/enrollnode/response.go b/typedapi/security/enrollnode/response.go index 24bdffc70c..47aab2b82a 100644 --- a/typedapi/security/enrollnode/response.go +++ b/typedapi/security/enrollnode/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package enrollnode // Response holds the response body struct for the package enrollnode // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/enroll_node/Response.ts#L20-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/enroll_node/Response.ts#L20-L29 type Response struct { HttpCaCert string `json:"http_ca_cert"` HttpCaKey string `json:"http_ca_key"` diff --git a/typedapi/security/getapikey/get_api_key.go b/typedapi/security/getapikey/get_api_key.go index 06e0de75c5..97049a6815 100644 --- a/typedapi/security/getapikey/get_api_key.go +++ b/typedapi/security/getapikey/get_api_key.go @@ -16,9 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
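// ---------------------------------------------------------------------------
// Editor's sketch (not generated code): the two enrollment endpoints whose
// descriptions were reworded above take no body and no path parameters, so the
// generated New constructors shown in this diff are all that is needed.
// Assumption: Do performs the request and decodes into the generated Response;
// the X-Opaque-Id value is a placeholder.
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/security/enrollkibana"
	"github.com/elastic/go-elasticsearch/v8/typedapi/security/enrollnode"
)

func enroll(ctx context.Context, tp elastictransport.Interface) error {
	kibana, err := enrollkibana.New(tp).
		Header("X-Opaque-Id", "setup-script"). // Header is part of the generated builder
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("CA cert for Kibana, PEM bytes:", len(kibana.HttpCa))

	node, err := enrollnode.New(tp).Do(ctx)
	if err != nil {
		return err
	}
	fmt.Println("CA cert for the new node, PEM bytes:", len(node.HttpCaCert))
	return nil
}
// ---------------------------------------------------------------------------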
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves information for one or more API keys. +// NOTE: If you have only the `manage_own_api_key` privilege, this API returns +// only the API keys that you own. +// If you have `read_security`, `manage_api_key` or greater privileges +// (including `manage_security`), this API returns all API keys regardless of +// ownership. package getapikey import ( @@ -27,7 +32,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -70,6 +74,11 @@ func NewGetApiKeyFunc(tp elastictransport.Interface) NewGetApiKey { } // Retrieves information for one or more API keys. +// NOTE: If you have only the `manage_own_api_key` privilege, this API returns +// only the API keys that you own. +// If you have `read_security`, `manage_api_key` or greater privileges +// (including `manage_security`), this API returns all API keys regardless of +// ownership. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-api-key.html func New(tp elastictransport.Interface) *GetApiKey { @@ -249,7 +258,7 @@ func (r GetApiKey) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -350,3 +359,56 @@ func (r *GetApiKey) ActiveOnly(activeonly bool) *GetApiKey { return r } + +// WithProfileUid Determines whether to also retrieve the profile uid, for the API key owner +// principal, if it exists. +// API name: with_profile_uid +func (r *GetApiKey) WithProfileUid(withprofileuid bool) *GetApiKey { + r.values.Set("with_profile_uid", strconv.FormatBool(withprofileuid)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetApiKey) ErrorTrace(errortrace bool) *GetApiKey { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetApiKey) FilterPath(filterpaths ...string) *GetApiKey { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetApiKey) Human(human bool) *GetApiKey { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *GetApiKey) Pretty(pretty bool) *GetApiKey { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/getapikey/response.go b/typedapi/security/getapikey/response.go index d8b5c33bf0..82c4b67b66 100644 --- a/typedapi/security/getapikey/response.go +++ b/typedapi/security/getapikey/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getapikey @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getapikey // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_api_key/SecurityGetApiKeyResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_api_key/SecurityGetApiKeyResponse.ts#L22-L24 type Response struct { ApiKeys []types.ApiKey `json:"api_keys"` } diff --git a/typedapi/security/getbuiltinprivileges/get_builtin_privileges.go b/typedapi/security/getbuiltinprivileges/get_builtin_privileges.go index 8cbedc741e..1f38393904 100644 --- a/typedapi/security/getbuiltinprivileges/get_builtin_privileges.go +++ b/typedapi/security/getbuiltinprivileges/get_builtin_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves the list of cluster privileges and index privileges that are // available in this version of Elasticsearch. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -252,7 +252,7 @@ func (r GetBuiltinPrivileges) IsSuccess(providedCtx context.Context) (bool, erro if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -279,3 +279,47 @@ func (r *GetBuiltinPrivileges) Header(key, value string) *GetBuiltinPrivileges { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetBuiltinPrivileges) ErrorTrace(errortrace bool) *GetBuiltinPrivileges { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetBuiltinPrivileges) FilterPath(filterpaths ...string) *GetBuiltinPrivileges { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. 
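// ---------------------------------------------------------------------------
// Editor's sketch (not generated code): combining the new with_profile_uid
// flag with the existing active_only filter on the typed GetApiKey builder.
// As the note added above explains, which keys come back depends on the
// caller's privileges (manage_own_api_key vs. read_security/manage_api_key
// and higher). Assumption: Do performs the request and decodes the generated
// Response.
package examples

import (
	"context"
	"fmt"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/security/getapikey"
)

func countActiveKeys(ctx context.Context, tp elastictransport.Interface) error {
	res, err := getapikey.New(tp).
		ActiveOnly(true).     // skip invalidated and expired keys
		WithProfileUid(true). // also resolve the owner's profile uid, if one exists
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("visible active API keys: %d\n", len(res.ApiKeys))
	return nil
}
// ---------------------------------------------------------------------------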
This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetBuiltinPrivileges) Human(human bool) *GetBuiltinPrivileges { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetBuiltinPrivileges) Pretty(pretty bool) *GetBuiltinPrivileges { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/getbuiltinprivileges/response.go b/typedapi/security/getbuiltinprivileges/response.go index 64a0f0fc47..002bf79044 100644 --- a/typedapi/security/getbuiltinprivileges/response.go +++ b/typedapi/security/getbuiltinprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getbuiltinprivileges @@ -30,7 +30,7 @@ import ( // Response holds the response body struct for the package getbuiltinprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_builtin_privileges/SecurityGetBuiltinPrivilegesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_builtin_privileges/SecurityGetBuiltinPrivilegesResponse.ts#L22-L24 type Response struct { Cluster []string `json:"cluster"` Index []string `json:"index"` diff --git a/typedapi/security/getprivileges/get_privileges.go b/typedapi/security/getprivileges/get_privileges.go index 1000367266..b762ca0263 100644 --- a/typedapi/security/getprivileges/get_privileges.go +++ b/typedapi/security/getprivileges/get_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves application privileges. package getprivileges @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -289,7 +289,7 @@ func (r GetPrivileges) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -334,3 +334,47 @@ func (r *GetPrivileges) Name(name string) *GetPrivileges { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetPrivileges) ErrorTrace(errortrace bool) *GetPrivileges { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *GetPrivileges) FilterPath(filterpaths ...string) *GetPrivileges { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetPrivileges) Human(human bool) *GetPrivileges { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetPrivileges) Pretty(pretty bool) *GetPrivileges { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/getprivileges/response.go b/typedapi/security/getprivileges/response.go index 5feda58df5..28740ed4f9 100644 --- a/typedapi/security/getprivileges/response.go +++ b/typedapi/security/getprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_privileges/SecurityGetPrivilegesResponse.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_privileges/SecurityGetPrivilegesResponse.ts#L23-L26 type Response map[string]map[string]types.PrivilegesActions diff --git a/typedapi/security/getrole/get_role.go b/typedapi/security/getrole/get_role.go index 25de235439..9b098e53ab 100644 --- a/typedapi/security/getrole/get_role.go +++ b/typedapi/security/getrole/get_role.go @@ -16,9 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Retrieves roles in the native realm. +// The role management APIs are generally the preferred way to manage roles, +// rather than using file-based role management. +// The get roles API cannot retrieve roles that are defined in roles files. package getrole import ( @@ -27,9 +29,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -74,7 +76,9 @@ func NewGetRoleFunc(tp elastictransport.Interface) NewGetRole { } } -// Retrieves roles in the native realm. +// The role management APIs are generally the preferred way to manage roles, +// rather than using file-based role management. +// The get roles API cannot retrieve roles that are defined in roles files. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-role.html func New(tp elastictransport.Interface) *GetRole { @@ -267,7 +271,7 @@ func (r GetRole) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -305,3 +309,47 @@ func (r *GetRole) Name(name string) *GetRole { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetRole) ErrorTrace(errortrace bool) *GetRole { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetRole) FilterPath(filterpaths ...string) *GetRole { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetRole) Human(human bool) *GetRole { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetRole) Pretty(pretty bool) *GetRole { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/getrole/response.go b/typedapi/security/getrole/response.go index 49f1073f98..a9fc6bf865 100644 --- a/typedapi/security/getrole/response.go +++ b/typedapi/security/getrole/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getrole @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrole // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_role/SecurityGetRoleResponse.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_role/SecurityGetRoleResponse.ts#L23-L26 type Response map[string]types.Role diff --git a/typedapi/security/getrolemapping/get_role_mapping.go b/typedapi/security/getrolemapping/get_role_mapping.go index 429d0bfb85..9f2bf397c8 100644 --- a/typedapi/security/getrolemapping/get_role_mapping.go +++ b/typedapi/security/getrolemapping/get_role_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves role mappings. 
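For context on the GetRole hunk above: the shared query-string helpers (ErrorTrace, FilterPath, Human, Pretty) are plain chainable setters on the typed request builder. The sketch below shows how they chain on the typed client; the cluster address, credentials, and role name are placeholders, not values taken from this diff.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	// Hypothetical cluster address and credentials; adjust for your setup.
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
		Username:  "elastic",
		Password:  "changeme",
	})
	if err != nil {
		log.Fatal(err)
	}

	// The setters added in this diff chain like any other request option
	// before Do.
	roles, err := es.Security.GetRole().
		Name("my_admin_role").
		FilterPath("*.cluster", "*.indices").
		Pretty(true).
		ErrorTrace(true).
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	for name := range roles { // getrole.Response is a map[string]types.Role
		fmt.Println("found role:", name)
	}
}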
package getrolemapping @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -267,7 +267,7 @@ func (r GetRoleMapping) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -307,3 +307,47 @@ func (r *GetRoleMapping) Name(name string) *GetRoleMapping { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetRoleMapping) ErrorTrace(errortrace bool) *GetRoleMapping { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetRoleMapping) FilterPath(filterpaths ...string) *GetRoleMapping { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetRoleMapping) Human(human bool) *GetRoleMapping { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetRoleMapping) Pretty(pretty bool) *GetRoleMapping { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/getrolemapping/response.go b/typedapi/security/getrolemapping/response.go index 56d39540e5..8e2ee38b2c 100644 --- a/typedapi/security/getrolemapping/response.go +++ b/typedapi/security/getrolemapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getrolemapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getrolemapping // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_role_mapping/SecurityGetRoleMappingResponse.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_role_mapping/SecurityGetRoleMappingResponse.ts#L23-L26 type Response map[string]types.SecurityRoleMapping diff --git a/typedapi/security/getserviceaccounts/get_service_accounts.go b/typedapi/security/getserviceaccounts/get_service_accounts.go index d9d0b42ade..225e976601 100644 --- a/typedapi/security/getserviceaccounts/get_service_accounts.go +++ b/typedapi/security/getserviceaccounts/get_service_accounts.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Retrieves information about service accounts. +// This API returns a list of service accounts that match the provided path +// parameter(s). package getserviceaccounts import ( @@ -27,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -77,7 +78,8 @@ func NewGetServiceAccountsFunc(tp elastictransport.Interface) NewGetServiceAccou } } -// Retrieves information about service accounts. +// This API returns a list of service accounts that match the provided path +// parameter(s). // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-service-accounts.html func New(tp elastictransport.Interface) *GetServiceAccounts { @@ -289,7 +291,7 @@ func (r GetServiceAccounts) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -337,3 +339,47 @@ func (r *GetServiceAccounts) Service(service string) *GetServiceAccounts { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetServiceAccounts) ErrorTrace(errortrace bool) *GetServiceAccounts { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetServiceAccounts) FilterPath(filterpaths ...string) *GetServiceAccounts { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetServiceAccounts) Human(human bool) *GetServiceAccounts { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetServiceAccounts) Pretty(pretty bool) *GetServiceAccounts { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/getserviceaccounts/response.go b/typedapi/security/getserviceaccounts/response.go index c8516934cf..3ca05ab41a 100644 --- a/typedapi/security/getserviceaccounts/response.go +++ b/typedapi/security/getserviceaccounts/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
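The GetServiceAccounts description above now says the endpoint returns the service accounts matching the optional path parameters. A hedged sketch that lists every service account by calling the builder with no namespace/service filter; only calls visible in this diff are used, and the client is assumed to be the typed client built as in the GetRole sketch earlier.

package examples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

// listServiceAccounts returns the names of all service accounts known to the
// cluster. With no namespace/service path filter the API returns every account.
func listServiceAccounts(ctx context.Context, es *elasticsearch.TypedClient) ([]string, error) {
	res, err := es.Security.GetServiceAccounts().Do(ctx)
	if err != nil {
		return nil, err
	}
	// getserviceaccounts.Response is a map[string]types.RoleDescriptorWrapper.
	names := make([]string, 0, len(res))
	for name := range res {
		names = append(names, name)
	}
	return names, nil
}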
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getserviceaccounts @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getserviceaccounts // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_service_accounts/GetServiceAccountsResponse.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_service_accounts/GetServiceAccountsResponse.ts#L23-L26 type Response map[string]types.RoleDescriptorWrapper diff --git a/typedapi/security/getservicecredentials/get_service_credentials.go b/typedapi/security/getservicecredentials/get_service_credentials.go index 169556127e..7f316bbe8e 100644 --- a/typedapi/security/getservicecredentials/get_service_credentials.go +++ b/typedapi/security/getservicecredentials/get_service_credentials.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves information of all service credentials for a service account. package getservicecredentials @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -275,7 +275,7 @@ func (r GetServiceCredentials) IsSuccess(providedCtx context.Context) (bool, err if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -320,3 +320,47 @@ func (r *GetServiceCredentials) _service(service string) *GetServiceCredentials return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetServiceCredentials) ErrorTrace(errortrace bool) *GetServiceCredentials { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetServiceCredentials) FilterPath(filterpaths ...string) *GetServiceCredentials { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetServiceCredentials) Human(human bool) *GetServiceCredentials { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *GetServiceCredentials) Pretty(pretty bool) *GetServiceCredentials { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/getservicecredentials/response.go b/typedapi/security/getservicecredentials/response.go index 5aad251ba8..4439aa654a 100644 --- a/typedapi/security/getservicecredentials/response.go +++ b/typedapi/security/getservicecredentials/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getservicecredentials @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getservicecredentials // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_service_credentials/GetServiceCredentialsResponse.ts#L25-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_service_credentials/GetServiceCredentialsResponse.ts#L25-L33 type Response struct { Count int `json:"count"` // NodesCredentials Contains service account credentials collected from all nodes of the cluster diff --git a/typedapi/security/getsettings/get_settings.go b/typedapi/security/getsettings/get_settings.go index 70718e1fc0..3427c466c3 100644 --- a/typedapi/security/getsettings/get_settings.go +++ b/typedapi/security/getsettings/get_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieve settings for the security system indices package getsettings @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strings" @@ -203,7 +202,7 @@ func (r GetSettings) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err diff --git a/typedapi/security/gettoken/get_token.go b/typedapi/security/gettoken/get_token.go index d68db82564..e21f7bb4e7 100644 --- a/typedapi/security/gettoken/get_token.go +++ b/typedapi/security/gettoken/get_token.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates a bearer token for access without requiring basic authentication. package gettoken @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -294,6 +295,50 @@ func (r *GetToken) Header(key, value string) *GetToken { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *GetToken) ErrorTrace(errortrace bool) *GetToken { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetToken) FilterPath(filterpaths ...string) *GetToken { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetToken) Human(human bool) *GetToken { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetToken) Pretty(pretty bool) *GetToken { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: grant_type func (r *GetToken) GrantType(granttype accesstokengranttype.AccessTokenGrantType) *GetToken { r.req.GrantType = &granttype diff --git a/typedapi/security/gettoken/request.go b/typedapi/security/gettoken/request.go index bf198b70df..5fad93ab2b 100644 --- a/typedapi/security/gettoken/request.go +++ b/typedapi/security/gettoken/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package gettoken @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package gettoken // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_token/GetUserAccessTokenRequest.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_token/GetUserAccessTokenRequest.ts#L25-L39 type Request struct { GrantType *accesstokengranttype.AccessTokenGrantType `json:"grant_type,omitempty"` KerberosTicket *string `json:"kerberos_ticket,omitempty"` @@ -46,6 +46,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/security/gettoken/response.go b/typedapi/security/gettoken/response.go index 9ec03e6800..4ef28c7a7b 100644 --- a/typedapi/security/gettoken/response.go +++ b/typedapi/security/gettoken/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package gettoken @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettoken // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_token/GetUserAccessTokenResponse.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_token/GetUserAccessTokenResponse.ts#L23-L33 type Response struct { AccessToken string `json:"access_token"` Authentication types.AuthenticatedUser `json:"authentication"` diff --git a/typedapi/security/getuser/get_user.go b/typedapi/security/getuser/get_user.go index 852a130f4a..210652dc62 100644 --- a/typedapi/security/getuser/get_user.go +++ b/typedapi/security/getuser/get_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves information about users in the native realm and built-in users. package getuser @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -268,7 +267,7 @@ func (r GetUser) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -314,3 +313,47 @@ func (r *GetUser) WithProfileUid(withprofileuid bool) *GetUser { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetUser) ErrorTrace(errortrace bool) *GetUser { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetUser) FilterPath(filterpaths ...string) *GetUser { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetUser) Human(human bool) *GetUser { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetUser) Pretty(pretty bool) *GetUser { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/getuser/response.go b/typedapi/security/getuser/response.go index f42200eeb5..89c408c537 100644 --- a/typedapi/security/getuser/response.go +++ b/typedapi/security/getuser/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getuser @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getuser // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_user/SecurityGetUserResponse.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_user/SecurityGetUserResponse.ts#L23-L26 type Response map[string]types.User diff --git a/typedapi/security/getuserprivileges/get_user_privileges.go b/typedapi/security/getuserprivileges/get_user_privileges.go index 7ee6738d57..d574ab76cf 100644 --- a/typedapi/security/getuserprivileges/get_user_privileges.go +++ b/typedapi/security/getuserprivileges/get_user_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves security privileges for the logged in user. package getuserprivileges @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -250,7 +250,7 @@ func (r GetUserPrivileges) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -303,3 +303,47 @@ func (r *GetUserPrivileges) Username(username string) *GetUserPrivileges { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetUserPrivileges) ErrorTrace(errortrace bool) *GetUserPrivileges { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetUserPrivileges) FilterPath(filterpaths ...string) *GetUserPrivileges { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetUserPrivileges) Human(human bool) *GetUserPrivileges { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *GetUserPrivileges) Pretty(pretty bool) *GetUserPrivileges { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/getuserprivileges/response.go b/typedapi/security/getuserprivileges/response.go index 27152d0469..4bc45fe672 100644 --- a/typedapi/security/getuserprivileges/response.go +++ b/typedapi/security/getuserprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getuserprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getuserprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_user_privileges/SecurityGetUserPrivilegesResponse.ts#L27-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_user_privileges/SecurityGetUserPrivilegesResponse.ts#L27-L35 type Response struct { Applications []types.ApplicationPrivileges `json:"applications"` Cluster []string `json:"cluster"` diff --git a/typedapi/security/getuserprofile/get_user_profile.go b/typedapi/security/getuserprofile/get_user_profile.go index 2ba1b26d39..48088c577a 100644 --- a/typedapi/security/getuserprofile/get_user_profile.go +++ b/typedapi/security/getuserprofile/get_user_profile.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Retrieves user profiles for the given unique ID(s). +// Retrieves a user's profile using the unique profile ID. package getuserprofile import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +76,7 @@ func NewGetUserProfileFunc(tp elastictransport.Interface) NewGetUserProfile { } } -// Retrieves user profiles for the given unique ID(s). +// Retrieves a user's profile using the unique profile ID. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-get-user-profile.html func New(tp elastictransport.Interface) *GetUserProfile { @@ -262,7 +262,7 @@ func (r GetUserProfile) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -313,3 +313,47 @@ func (r *GetUserProfile) Data(data ...string) *GetUserProfile { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetUserProfile) ErrorTrace(errortrace bool) *GetUserProfile { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *GetUserProfile) FilterPath(filterpaths ...string) *GetUserProfile { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetUserProfile) Human(human bool) *GetUserProfile { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetUserProfile) Pretty(pretty bool) *GetUserProfile { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/getuserprofile/response.go b/typedapi/security/getuserprofile/response.go index 930f5093d4..72e38d27bf 100644 --- a/typedapi/security/getuserprofile/response.go +++ b/typedapi/security/getuserprofile/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getuserprofile @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_user_profile/Response.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_user_profile/Response.ts#L23-L28 type Response struct { Errors *types.GetUserProfileErrors `json:"errors,omitempty"` Profiles []types.UserProfileWithMetadata `json:"profiles"` diff --git a/typedapi/security/grantapikey/grant_api_key.go b/typedapi/security/grantapikey/grant_api_key.go index ee28a4aab2..7f73dc5472 100644 --- a/typedapi/security/grantapikey/grant_api_key.go +++ b/typedapi/security/grantapikey/grant_api_key.go @@ -16,9 +16,31 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates an API key on behalf of another user. +// This API is similar to Create API keys, however it creates the API key for a +// user that is different than the user that runs the API. +// The caller must have authentication credentials (either an access token, or a +// username and password) for the user on whose behalf the API key will be +// created. +// It is not possible to use this API to create an API key without that user’s +// credentials. +// The user, for whom the authentication credentials is provided, can optionally +// "run as" (impersonate) another user. +// In this case, the API key will be created on behalf of the impersonated user. 
+// +// This API is intended be used by applications that need to create and manage +// API keys for end users, but cannot guarantee that those users have permission +// to create API keys on their own behalf. +// +// A successful grant API key API call returns a JSON structure that contains +// the API key, its unique id, and its name. +// If applicable, it also returns expiration information for the API key in +// milliseconds. +// +// By default, API keys never expire. You can specify expiration information +// when you create the API keys. package grantapikey import ( @@ -30,6 +52,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -74,6 +97,28 @@ func NewGrantApiKeyFunc(tp elastictransport.Interface) NewGrantApiKey { } // Creates an API key on behalf of another user. +// This API is similar to Create API keys, however it creates the API key for a +// user that is different than the user that runs the API. +// The caller must have authentication credentials (either an access token, or a +// username and password) for the user on whose behalf the API key will be +// created. +// It is not possible to use this API to create an API key without that user’s +// credentials. +// The user, for whom the authentication credentials is provided, can optionally +// "run as" (impersonate) another user. +// In this case, the API key will be created on behalf of the impersonated user. +// +// This API is intended be used by applications that need to create and manage +// API keys for end users, but cannot guarantee that those users have permission +// to create API keys on their own behalf. +// +// A successful grant API key API call returns a JSON structure that contains +// the API key, its unique id, and its name. +// If applicable, it also returns expiration information for the API key in +// milliseconds. +// +// By default, API keys never expire. You can specify expiration information +// when you create the API keys. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-grant-api-key.html func New(tp elastictransport.Interface) *GrantApiKey { @@ -294,6 +339,50 @@ func (r *GrantApiKey) Header(key, value string) *GrantApiKey { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GrantApiKey) ErrorTrace(errortrace bool) *GrantApiKey { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GrantApiKey) FilterPath(filterpaths ...string) *GrantApiKey { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GrantApiKey) Human(human bool) *GrantApiKey { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *GrantApiKey) Pretty(pretty bool) *GrantApiKey { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AccessToken The user’s access token. // If you specify the `access_token` grant type, this parameter is required. // It is not valid with other grant types. diff --git a/typedapi/security/grantapikey/request.go b/typedapi/security/grantapikey/request.go index dbbd8b03a0..ca846dce56 100644 --- a/typedapi/security/grantapikey/request.go +++ b/typedapi/security/grantapikey/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package grantapikey @@ -34,7 +34,7 @@ import ( // Request holds the request body struct for the package grantapikey // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/grant_api_key/SecurityGrantApiKeyRequest.ts#L24-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/grant_api_key/SecurityGrantApiKeyRequest.ts#L24-L75 type Request struct { // AccessToken The user’s access token. @@ -60,6 +60,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/security/grantapikey/response.go b/typedapi/security/grantapikey/response.go index 20afe7656a..3c7937de32 100644 --- a/typedapi/security/grantapikey/response.go +++ b/typedapi/security/grantapikey/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package grantapikey // Response holds the response body struct for the package grantapikey // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/grant_api_key/SecurityGrantApiKeyResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/grant_api_key/SecurityGrantApiKeyResponse.ts#L23-L31 type Response struct { ApiKey string `json:"api_key"` Encoded string `json:"encoded"` diff --git a/typedapi/security/hasprivileges/has_privileges.go b/typedapi/security/hasprivileges/has_privileges.go index f47d430270..3a6712e46c 100644 --- a/typedapi/security/hasprivileges/has_privileges.go +++ b/typedapi/security/hasprivileges/has_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Determines whether the specified user has a specified list of privileges. 
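The same four helpers just added to GrantApiKey above are stamped onto every request type in this change set, and each one does nothing more than write a single entry into the request's url.Values before the query string is built. A small standalone sketch of the resulting encoding follows; the parameter values are arbitrary examples.

package main

import (
	"fmt"
	"net/url"
	"strconv"
	"strings"
)

func main() {
	// Mirror what the generated setters do: one query parameter per helper.
	values := url.Values{}
	values.Set("filter_path", strings.Join([]string{"*.cluster", "*.indices"}, ","))
	values.Set("error_trace", strconv.FormatBool(true))
	values.Set("pretty", strconv.FormatBool(true))

	// Keys are sorted on encoding:
	// error_trace=true&filter_path=%2A.cluster%2C%2A.indices&pretty=true
	fmt.Println(values.Encode())
}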
package hasprivileges @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -324,6 +325,50 @@ func (r *HasPrivileges) User(user string) *HasPrivileges { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *HasPrivileges) ErrorTrace(errortrace bool) *HasPrivileges { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *HasPrivileges) FilterPath(filterpaths ...string) *HasPrivileges { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *HasPrivileges) Human(human bool) *HasPrivileges { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *HasPrivileges) Pretty(pretty bool) *HasPrivileges { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: application func (r *HasPrivileges) Application(applications ...types.ApplicationPrivilegesCheck) *HasPrivileges { r.req.Application = applications diff --git a/typedapi/security/hasprivileges/request.go b/typedapi/security/hasprivileges/request.go index 3cbc552296..baf93ae499 100644 --- a/typedapi/security/hasprivileges/request.go +++ b/typedapi/security/hasprivileges/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package hasprivileges @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package hasprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/has_privileges/SecurityHasPrivilegesRequest.ts#L25-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/has_privileges/SecurityHasPrivilegesRequest.ts#L25-L42 type Request struct { Application []types.ApplicationPrivilegesCheck `json:"application,omitempty"` // Cluster A list of the cluster privileges that you want to check. @@ -41,6 +41,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/security/hasprivileges/response.go b/typedapi/security/hasprivileges/response.go index 3eef4a07f8..d62a75213e 100644 --- a/typedapi/security/hasprivileges/response.go +++ b/typedapi/security/hasprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package hasprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package hasprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/has_privileges/SecurityHasPrivilegesResponse.ts#L24-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/has_privileges/SecurityHasPrivilegesResponse.ts#L24-L32 type Response struct { Application types.ApplicationsPrivileges `json:"application"` Cluster map[string]bool `json:"cluster"` diff --git a/typedapi/security/hasprivilegesuserprofile/has_privileges_user_profile.go b/typedapi/security/hasprivilegesuserprofile/has_privileges_user_profile.go index fe01b89bb3..daaa397c2c 100644 --- a/typedapi/security/hasprivilegesuserprofile/has_privileges_user_profile.go +++ b/typedapi/security/hasprivilegesuserprofile/has_privileges_user_profile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Determines whether the users associated with the specified profile IDs have // all the requested privileges. @@ -31,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -295,6 +296,50 @@ func (r *HasPrivilegesUserProfile) Header(key, value string) *HasPrivilegesUserP return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *HasPrivilegesUserProfile) ErrorTrace(errortrace bool) *HasPrivilegesUserProfile { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *HasPrivilegesUserProfile) FilterPath(filterpaths ...string) *HasPrivilegesUserProfile { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *HasPrivilegesUserProfile) Human(human bool) *HasPrivilegesUserProfile { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *HasPrivilegesUserProfile) Pretty(pretty bool) *HasPrivilegesUserProfile { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: privileges func (r *HasPrivilegesUserProfile) Privileges(privileges *types.PrivilegesCheck) *HasPrivilegesUserProfile { diff --git a/typedapi/security/hasprivilegesuserprofile/request.go b/typedapi/security/hasprivilegesuserprofile/request.go index f9dff13ecd..8f940875ee 100644 --- a/typedapi/security/hasprivilegesuserprofile/request.go +++ b/typedapi/security/hasprivilegesuserprofile/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package hasprivilegesuserprofile @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package hasprivilegesuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/has_privileges_user_profile/Request.ts#L24-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/has_privileges_user_profile/Request.ts#L24-L38 type Request struct { Privileges types.PrivilegesCheck `json:"privileges"` // Uids A list of profile IDs. The privileges are checked for associated users of the @@ -40,6 +40,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/security/hasprivilegesuserprofile/response.go b/typedapi/security/hasprivilegesuserprofile/response.go index e976260a3e..31bf99c955 100644 --- a/typedapi/security/hasprivilegesuserprofile/response.go +++ b/typedapi/security/hasprivilegesuserprofile/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package hasprivilegesuserprofile @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package hasprivilegesuserprofile // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/has_privileges_user_profile/Response.ts#L23-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/has_privileges_user_profile/Response.ts#L23-L38 type Response struct { // Errors The subset of the requested profile IDs for which an error diff --git a/typedapi/security/invalidateapikey/invalidate_api_key.go b/typedapi/security/invalidateapikey/invalidate_api_key.go index 18ab6b39d8..d80d8271c2 100644 --- a/typedapi/security/invalidateapikey/invalidate_api_key.go +++ b/typedapi/security/invalidateapikey/invalidate_api_key.go @@ -16,9 +16,18 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Invalidates one or more API keys. 
+// The `manage_api_key` privilege allows deleting any API keys. +// The `manage_own_api_key` only allows deleting API keys that are owned by the +// user. +// In addition, with the `manage_own_api_key` privilege, an invalidation request +// must be issued in one of the three formats: +// - Set the parameter `owner=true`. +// - Or, set both `username` and `realm_name` to match the user’s identity. +// - Or, if the request is issued by an API key, i.e. an API key invalidates +// itself, specify its ID in the `ids` field. package invalidateapikey import ( @@ -30,6 +39,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -73,6 +83,15 @@ func NewInvalidateApiKeyFunc(tp elastictransport.Interface) NewInvalidateApiKey } // Invalidates one or more API keys. +// The `manage_api_key` privilege allows deleting any API keys. +// The `manage_own_api_key` only allows deleting API keys that are owned by the +// user. +// In addition, with the `manage_own_api_key` privilege, an invalidation request +// must be issued in one of the three formats: +// - Set the parameter `owner=true`. +// - Or, set both `username` and `realm_name` to match the user’s identity. +// - Or, if the request is issued by an API key, i.e. an API key invalidates +// itself, specify its ID in the `ids` field. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-invalidate-api-key.html func New(tp elastictransport.Interface) *InvalidateApiKey { @@ -291,6 +310,50 @@ func (r *InvalidateApiKey) Header(key, value string) *InvalidateApiKey { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *InvalidateApiKey) ErrorTrace(errortrace bool) *InvalidateApiKey { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *InvalidateApiKey) FilterPath(filterpaths ...string) *InvalidateApiKey { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *InvalidateApiKey) Human(human bool) *InvalidateApiKey { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *InvalidateApiKey) Pretty(pretty bool) *InvalidateApiKey { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: id func (r *InvalidateApiKey) Id(id string) *InvalidateApiKey { r.req.Id = &id diff --git a/typedapi/security/invalidateapikey/request.go b/typedapi/security/invalidateapikey/request.go index 2162d646b2..762014bebc 100644 --- a/typedapi/security/invalidateapikey/request.go +++ b/typedapi/security/invalidateapikey/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
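To make the invalidation formats described above concrete, here is a hedged sketch of the third one, in which an API key invalidates itself by sending its own ID. It uses the singular Id setter visible in this hunk (the request body also accepts an `ids` list, as the doc comment notes); the key ID and client are placeholders.

package examples

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// invalidateOwnKey lets an API key revoke itself: callers holding only the
// manage_own_api_key privilege must identify the key explicitly.
func invalidateOwnKey(ctx context.Context, es *elasticsearch.TypedClient, keyID string) error {
	res, err := es.Security.InvalidateApiKey().
		Id(keyID). // e.g. "VuaCfGcBCdbkQm-e5aOx" (placeholder)
		Do(ctx)
	if err != nil {
		return err
	}
	if res.ErrorCount > 0 {
		return fmt.Errorf("invalidation reported %d errors", res.ErrorCount)
	}
	return nil
}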
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package invalidateapikey @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package invalidateapikey // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/invalidate_api_key/SecurityInvalidateApiKeyRequest.ts#L23-L66 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/invalidate_api_key/SecurityInvalidateApiKeyRequest.ts#L23-L66 type Request struct { Id *string `json:"id,omitempty"` // Ids A list of API key ids. @@ -59,6 +59,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -104,7 +105,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "owner": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/security/invalidateapikey/response.go b/typedapi/security/invalidateapikey/response.go index 5be96e9cdc..c04526be65 100644 --- a/typedapi/security/invalidateapikey/response.go +++ b/typedapi/security/invalidateapikey/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package invalidateapikey @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package invalidateapikey // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/invalidate_api_key/SecurityInvalidateApiKeyResponse.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/invalidate_api_key/SecurityInvalidateApiKeyResponse.ts#L23-L30 type Response struct { ErrorCount int `json:"error_count"` ErrorDetails []types.ErrorCause `json:"error_details,omitempty"` diff --git a/typedapi/security/invalidatetoken/invalidate_token.go b/typedapi/security/invalidatetoken/invalidate_token.go index 30dc615578..2878f129f8 100644 --- a/typedapi/security/invalidatetoken/invalidate_token.go +++ b/typedapi/security/invalidatetoken/invalidate_token.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Invalidates one or more access tokens or refresh tokens. package invalidatetoken @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -293,6 +294,50 @@ func (r *InvalidateToken) Header(key, value string) *InvalidateToken { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *InvalidateToken) ErrorTrace(errortrace bool) *InvalidateToken { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *InvalidateToken) FilterPath(filterpaths ...string) *InvalidateToken { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *InvalidateToken) Human(human bool) *InvalidateToken { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *InvalidateToken) Pretty(pretty bool) *InvalidateToken { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: realm_name func (r *InvalidateToken) RealmName(name string) *InvalidateToken { r.req.RealmName = &name diff --git a/typedapi/security/invalidatetoken/request.go b/typedapi/security/invalidatetoken/request.go index 3b27726000..2cdf36d021 100644 --- a/typedapi/security/invalidatetoken/request.go +++ b/typedapi/security/invalidatetoken/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package invalidatetoken @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package invalidatetoken // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/invalidate_token/SecurityInvalidateTokenRequest.ts#L23-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/invalidate_token/SecurityInvalidateTokenRequest.ts#L23-L35 type Request struct { RealmName *string `json:"realm_name,omitempty"` RefreshToken *string `json:"refresh_token,omitempty"` @@ -42,6 +42,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/security/invalidatetoken/response.go b/typedapi/security/invalidatetoken/response.go index 792a0d160c..87c533004f 100644 --- a/typedapi/security/invalidatetoken/response.go +++ b/typedapi/security/invalidatetoken/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
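Editor's sketch: the same common parameters are now available on InvalidateToken. A hedged example that invalidates every token issued by one realm and trims the response with filter_path; the realm name is hypothetical and es is an assumed *elasticsearch.TypedClient.

func invalidateRealmTokens(ctx context.Context, es *elasticsearch.TypedClient) error {
	resp, err := es.Security.InvalidateToken().
		RealmName("saml1"). // hypothetical realm name
		FilterPath("invalidated_tokens", "error_count").
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("error_count: %d\n", resp.ErrorCount)
	return nil
}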
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package invalidatetoken @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package invalidatetoken // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/invalidate_token/SecurityInvalidateTokenResponse.ts#L23-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/invalidate_token/SecurityInvalidateTokenResponse.ts#L23-L30 type Response struct { ErrorCount int64 `json:"error_count"` ErrorDetails []types.ErrorCause `json:"error_details,omitempty"` diff --git a/typedapi/security/oidcauthenticate/oidc_authenticate.go b/typedapi/security/oidcauthenticate/oidc_authenticate.go index fef4e3bb3a..8315c5da72 100644 --- a/typedapi/security/oidcauthenticate/oidc_authenticate.go +++ b/typedapi/security/oidcauthenticate/oidc_authenticate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Exchanges an OpenID Connection authentication response message for an // Elasticsearch access token and refresh token pair @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strings" @@ -207,7 +206,7 @@ func (r OidcAuthenticate) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err diff --git a/typedapi/security/oidclogout/oidc_logout.go b/typedapi/security/oidclogout/oidc_logout.go index 18d2e954de..4c01a98e1c 100644 --- a/typedapi/security/oidclogout/oidc_logout.go +++ b/typedapi/security/oidclogout/oidc_logout.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Invalidates a refresh token and access token that was generated from the // OpenID Connect Authenticate API @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strings" @@ -207,7 +206,7 @@ func (r OidcLogout) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err diff --git a/typedapi/security/oidcprepareauthentication/oidc_prepare_authentication.go b/typedapi/security/oidcprepareauthentication/oidc_prepare_authentication.go index e44fb8c649..4165d15794 100644 --- a/typedapi/security/oidcprepareauthentication/oidc_prepare_authentication.go +++ b/typedapi/security/oidcprepareauthentication/oidc_prepare_authentication.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
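The ioutil.Discard to io.Discard swap in the OIDC helpers above follows the deprecation of the io/ioutil package since Go 1.16; io.Discard is the same sink, just relocated. A minimal sketch of the drain-and-close pattern these IsSuccess helpers rely on (imports io and net/http assumed):

func drainBody(res *http.Response) error {
	// Drain the body before closing so the underlying connection can be reused.
	if _, err := io.Copy(io.Discard, res.Body); err != nil {
		return err
	}
	return res.Body.Close()
}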
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates an OAuth 2.0 authentication request as a URL string package oidcprepareauthentication @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strings" @@ -205,7 +204,7 @@ func (r OidcPrepareAuthentication) IsSuccess(providedCtx context.Context) (bool, if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err diff --git a/typedapi/security/putprivileges/put_privileges.go b/typedapi/security/putprivileges/put_privileges.go index bfb6f54c64..a20e418909 100644 --- a/typedapi/security/putprivileges/put_privileges.go +++ b/typedapi/security/putprivileges/put_privileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Adds or updates application privileges. package putprivileges @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -83,6 +84,8 @@ func New(tp elastictransport.Interface) *PutPrivileges { headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { @@ -299,3 +302,47 @@ func (r *PutPrivileges) Refresh(refresh refresh.Refresh) *PutPrivileges { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutPrivileges) ErrorTrace(errortrace bool) *PutPrivileges { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutPrivileges) FilterPath(filterpaths ...string) *PutPrivileges { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutPrivileges) Human(human bool) *PutPrivileges { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutPrivileges) Pretty(pretty bool) *PutPrivileges { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/putprivileges/request.go b/typedapi/security/putprivileges/request.go index 24e4d7e071..091fd8c3eb 100644 --- a/typedapi/security/putprivileges/request.go +++ b/typedapi/security/putprivileges/request.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putprivileges @@ -26,5 +26,12 @@ import ( // Request holds the request body struct for the package putprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/put_privileges/SecurityPutPrivilegesRequest.ts#L25-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/put_privileges/SecurityPutPrivilegesRequest.ts#L25-L37 type Request = map[string]map[string]types.PrivilegesActions + +// NewRequest returns a Request +func NewRequest() *Request { + r := make(map[string]map[string]types.PrivilegesActions, 0) + + return &r +} diff --git a/typedapi/security/putprivileges/response.go b/typedapi/security/putprivileges/response.go index 8361ed7ac5..d76022ace7 100644 --- a/typedapi/security/putprivileges/response.go +++ b/typedapi/security/putprivileges/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putprivileges @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putprivileges // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/put_privileges/SecurityPutPrivilegesResponse.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/put_privileges/SecurityPutPrivilegesResponse.ts#L23-L26 type Response map[string]map[string]types.CreatedStatus diff --git a/typedapi/security/putrole/put_role.go b/typedapi/security/putrole/put_role.go index 01d1a42479..4b6e03e053 100644 --- a/typedapi/security/putrole/put_role.go +++ b/typedapi/security/putrole/put_role.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Adds and updates roles in the native realm. +// The role management APIs are generally the preferred way to manage roles, +// rather than using file-based role management. +// The create or update roles API cannot update roles that are defined in roles +// files. package putrole import ( @@ -30,6 +33,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -82,7 +86,10 @@ func NewPutRoleFunc(tp elastictransport.Interface) NewPutRole { } } -// Adds and updates roles in the native realm. +// The role management APIs are generally the preferred way to manage roles, +// rather than using file-based role management. +// The create or update roles API cannot update roles that are defined in roles +// files. 
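Editor's sketch for the putprivileges hunks above: since Request is a type alias for map[string]map[string]types.PrivilegesActions, the new NewRequest returns a pointer to an initialized map that can be filled directly. The application, privilege, and action names below are hypothetical, and it is assumed that types.PrivilegesActions carries the allowed actions in an Actions field and that the builder exposes the usual generated Request setter.

func putAppPrivileges(ctx context.Context, es *elasticsearch.TypedClient) error {
	req := putprivileges.NewRequest()
	(*req)["myapp"] = map[string]types.PrivilegesActions{ // hypothetical application
		"read": {Actions: []string{"data:read/*"}}, // hypothetical privilege and action
	}
	_, err := es.Security.PutPrivileges().Request(req).Do(ctx) // Request setter assumed
	return err
}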
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-put-role.html func New(tp elastictransport.Interface) *PutRole { @@ -326,6 +333,50 @@ func (r *PutRole) Refresh(refresh refresh.Refresh) *PutRole { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutRole) ErrorTrace(errortrace bool) *PutRole { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutRole) FilterPath(filterpaths ...string) *PutRole { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutRole) Human(human bool) *PutRole { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutRole) Pretty(pretty bool) *PutRole { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Applications A list of application privilege entries. // API name: applications func (r *PutRole) Applications(applications ...types.ApplicationPrivileges) *PutRole { @@ -371,7 +422,9 @@ func (r *PutRole) Metadata(metadata types.Metadata) *PutRole { return r } -// RunAs A list of users that the owners of this role can impersonate. +// RunAs A list of users that the owners of this role can impersonate. *Note*: in +// Serverless, the run-as feature is disabled. For API compatibility, you can +// still specify an empty `run_as` field, but a non-empty list will be rejected. // API name: run_as func (r *PutRole) RunAs(runas ...string) *PutRole { r.req.RunAs = runas diff --git a/typedapi/security/putrole/request.go b/typedapi/security/putrole/request.go index 83f3296d05..037bc1e9a0 100644 --- a/typedapi/security/putrole/request.go +++ b/typedapi/security/putrole/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putrole @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putrole // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/put_role/SecurityPutRoleRequest.ts#L30-L79 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/put_role/SecurityPutRoleRequest.ts#L30-L80 type Request struct { // Applications A list of application privilege entries. @@ -50,7 +50,9 @@ type Request struct { // Metadata Optional metadata. Within the metadata object, keys that begin with an // underscore (`_`) are reserved for system use. 
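Editor's sketch for the put_role changes above: a minimal role created through the role management API rather than a roles file, exercising the RunAs setter documented in this hunk. The role and user names are hypothetical, es is an assumed *elasticsearch.TypedClient, and the role name is assumed to be passed as the path parameter; per the note above, on Serverless the run_as list must stay empty.

func createImpersonationRole(ctx context.Context, es *elasticsearch.TypedClient) error {
	resp, err := es.Security.PutRole("impersonator"). // hypothetical role name
		RunAs("service-account-a").                   // non-empty run_as is rejected on Serverless
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("created: %v\n", resp.Role.Created)
	return nil
}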
Metadata types.Metadata `json:"metadata,omitempty"` - // RunAs A list of users that the owners of this role can impersonate. + // RunAs A list of users that the owners of this role can impersonate. *Note*: in + // Serverless, the run-as feature is disabled. For API compatibility, you can + // still specify an empty `run_as` field, but a non-empty list will be rejected. RunAs []string `json:"run_as,omitempty"` // TransientMetadata Indicates roles that might be incompatible with the current cluster license, // specifically roles with document and field level security. When the cluster @@ -67,6 +69,7 @@ func NewRequest() *Request { Global: make(map[string]json.RawMessage, 0), TransientMetadata: make(map[string]json.RawMessage, 0), } + return r } diff --git a/typedapi/security/putrole/response.go b/typedapi/security/putrole/response.go index 8af8dfeda5..8ea45f20df 100644 --- a/typedapi/security/putrole/response.go +++ b/typedapi/security/putrole/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putrole @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putrole // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/put_role/SecurityPutRoleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/put_role/SecurityPutRoleResponse.ts#L22-L24 type Response struct { Role types.CreatedStatus `json:"role"` } diff --git a/typedapi/security/putrolemapping/put_role_mapping.go b/typedapi/security/putrolemapping/put_role_mapping.go index a30a21c5d9..d8777b87b1 100644 --- a/typedapi/security/putrolemapping/put_role_mapping.go +++ b/typedapi/security/putrolemapping/put_role_mapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates and updates role mappings. package putrolemapping @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -325,6 +326,50 @@ func (r *PutRoleMapping) Refresh(refresh refresh.Refresh) *PutRoleMapping { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutRoleMapping) ErrorTrace(errortrace bool) *PutRoleMapping { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutRoleMapping) FilterPath(filterpaths ...string) *PutRoleMapping { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutRoleMapping) Human(human bool) *PutRoleMapping { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutRoleMapping) Pretty(pretty bool) *PutRoleMapping { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: enabled func (r *PutRoleMapping) Enabled(enabled bool) *PutRoleMapping { r.req.Enabled = &enabled diff --git a/typedapi/security/putrolemapping/request.go b/typedapi/security/putrolemapping/request.go index d22ac02149..fef03d920e 100644 --- a/typedapi/security/putrolemapping/request.go +++ b/typedapi/security/putrolemapping/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putrolemapping @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putrolemapping // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/put_role_mapping/SecurityPutRoleMappingRequest.ts#L25-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/put_role_mapping/SecurityPutRoleMappingRequest.ts#L25-L47 type Request struct { Enabled *bool `json:"enabled,omitempty"` Metadata types.Metadata `json:"metadata,omitempty"` @@ -46,6 +46,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -76,7 +77,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { switch t { case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/security/putrolemapping/response.go b/typedapi/security/putrolemapping/response.go index c74d9af948..2594f85544 100644 --- a/typedapi/security/putrolemapping/response.go +++ b/typedapi/security/putrolemapping/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putrolemapping @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package putrolemapping // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/put_role_mapping/SecurityPutRoleMappingResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/put_role_mapping/SecurityPutRoleMappingResponse.ts#L22-L24 type Response struct { Created *bool `json:"created,omitempty"` RoleMapping types.CreatedStatus `json:"role_mapping"` diff --git a/typedapi/security/putuser/put_user.go b/typedapi/security/putuser/put_user.go index 1a0dde7bfc..17e0c1f570 100644 --- a/typedapi/security/putuser/put_user.go +++ b/typedapi/security/putuser/put_user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Adds and updates users in the native realm. These users are commonly referred // to as native users. @@ -31,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -327,9 +328,53 @@ func (r *PutUser) Refresh(refresh refresh.Refresh) *PutUser { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutUser) ErrorTrace(errortrace bool) *PutUser { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutUser) FilterPath(filterpaths ...string) *PutUser { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutUser) Human(human bool) *PutUser { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *PutUser) Pretty(pretty bool) *PutUser { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: email func (r *PutUser) Email(email string) *PutUser { - r.req.Email = email + r.req.Email = &email return r } @@ -343,7 +388,7 @@ func (r *PutUser) Enabled(enabled bool) *PutUser { // API name: full_name func (r *PutUser) FullName(fullname string) *PutUser { - r.req.FullName = fullname + r.req.FullName = &fullname return r } diff --git a/typedapi/security/putuser/request.go b/typedapi/security/putuser/request.go index 3769edf657..ef4eb9ab1c 100644 --- a/typedapi/security/putuser/request.go +++ b/typedapi/security/putuser/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putuser @@ -33,11 +33,11 @@ import ( // Request holds the request body struct for the package putuser // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/put_user/SecurityPutUserRequest.ts#L23-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/put_user/SecurityPutUserRequest.ts#L23-L44 type Request struct { - Email string `json:"email,omitempty"` + Email *string `json:"email,omitempty"` Enabled *bool `json:"enabled,omitempty"` - FullName string `json:"full_name,omitempty"` + FullName *string `json:"full_name,omitempty"` Metadata types.Metadata `json:"metadata,omitempty"` Password *string `json:"password,omitempty"` PasswordHash *string `json:"password_hash,omitempty"` @@ -48,6 +48,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -87,10 +88,10 @@ func (s *Request) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Email = o + s.Email = &o case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +114,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.FullName = o + s.FullName = &o case "metadata": if err := dec.Decode(&s.Metadata); err != nil { diff --git a/typedapi/security/putuser/response.go b/typedapi/security/putuser/response.go index d79832b3cf..7d333bd48e 100644 --- a/typedapi/security/putuser/response.go +++ b/typedapi/security/putuser/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
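Editor's note on the put_user hunks above: Email and FullName switch from string to *string so an unset field can be distinguished from an explicitly empty one, while the builder methods keep accepting plain strings and take the address internally, so callers are unchanged. A hedged sketch; the username, email, and password are hypothetical, es is an assumed *elasticsearch.TypedClient, and the Password setter is assumed to mirror the request field.

func upsertUser(ctx context.Context, es *elasticsearch.TypedClient) error {
	resp, err := es.Security.PutUser("jdoe"). // hypothetical username as path parameter
		Email("jdoe@example.com").            // stored as *string under the hood
		FullName("Jane Doe").
		Password("a-long-random-password"). // assumed setter mirroring the request field
		Enabled(true).
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("created: %v\n", resp.Created)
	return nil
}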
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putuser // Response holds the response body struct for the package putuser // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/put_user/SecurityPutUserResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/put_user/SecurityPutUserResponse.ts#L20-L22 type Response struct { Created bool `json:"created"` } diff --git a/typedapi/security/queryapikeys/query_api_keys.go b/typedapi/security/queryapikeys/query_api_keys.go index ff90039852..fd71e81562 100644 --- a/typedapi/security/queryapikeys/query_api_keys.go +++ b/typedapi/security/queryapikeys/query_api_keys.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Retrieves information for API keys using a subset of query DSL +// Retrieves information for API keys in a paginated manner. You can optionally +// filter the results with a query. package queryapikeys import ( @@ -73,7 +74,8 @@ func NewQueryApiKeysFunc(tp elastictransport.Interface) NewQueryApiKeys { } } -// Retrieves information for API keys using a subset of query DSL +// Retrieves information for API keys in a paginated manner. You can optionally +// filter the results with a query. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-api-key.html func New(tp elastictransport.Interface) *QueryApiKeys { @@ -247,6 +249,8 @@ func (r QueryApiKeys) Do(providedCtx context.Context) (*Response, error) { response := NewResponse() + r.TypedKeys(true) + res, err := r.Perform(ctx) if err != nil { if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { @@ -305,6 +309,68 @@ func (r *QueryApiKeys) WithLimitedBy(withlimitedby bool) *QueryApiKeys { return r } +// WithProfileUid Determines whether to also retrieve the profile uid, for the API key owner +// principal, if it exists. +// API name: with_profile_uid +func (r *QueryApiKeys) WithProfileUid(withprofileuid bool) *QueryApiKeys { + r.values.Set("with_profile_uid", strconv.FormatBool(withprofileuid)) + + return r +} + +// TypedKeys Determines whether aggregation names are prefixed by their respective types +// in the response. +// API name: typed_keys +func (r *QueryApiKeys) TypedKeys(typedkeys bool) *QueryApiKeys { + r.values.Set("typed_keys", strconv.FormatBool(typedkeys)) + + return r +} + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *QueryApiKeys) ErrorTrace(errortrace bool) *QueryApiKeys { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *QueryApiKeys) FilterPath(filterpaths ...string) *QueryApiKeys { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *QueryApiKeys) Human(human bool) *QueryApiKeys { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *QueryApiKeys) Pretty(pretty bool) *QueryApiKeys { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Aggregations Any aggregations to run over the corpus of returned API keys. // Aggregations and queries work together. Aggregations are computed only on the // API keys that match the query. @@ -314,7 +380,7 @@ func (r *QueryApiKeys) WithLimitedBy(withlimitedby bool) *QueryApiKeys { // Additionally, aggregations only run over the same subset of fields that query // works with. // API name: aggregations -func (r *QueryApiKeys) Aggregations(aggregations map[string]types.APIKeyAggregationContainer) *QueryApiKeys { +func (r *QueryApiKeys) Aggregations(aggregations map[string]types.ApiKeyAggregationContainer) *QueryApiKeys { r.req.Aggregations = aggregations @@ -342,7 +408,7 @@ func (r *QueryApiKeys) From(from int) *QueryApiKeys { // `creation`, `expiration`, `invalidated`, `invalidation`, `username`, `realm`, // and `metadata`. // API name: query -func (r *QueryApiKeys) Query(query *types.APIKeyQueryContainer) *QueryApiKeys { +func (r *QueryApiKeys) Query(query *types.ApiKeyQueryContainer) *QueryApiKeys { r.req.Query = query diff --git a/typedapi/security/queryapikeys/request.go b/typedapi/security/queryapikeys/request.go index 7991e5a6cb..e84738b78b 100644 --- a/typedapi/security/queryapikeys/request.go +++ b/typedapi/security/queryapikeys/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package queryapikeys @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package queryapikeys // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/query_api_keys/QueryApiKeysRequest.ts#L26-L86 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/query_api_keys/QueryApiKeysRequest.ts#L26-L99 type Request struct { // Aggregations Any aggregations to run over the corpus of returned API keys. @@ -44,7 +44,7 @@ type Request struct { // `cardinality`, `value_count`, `composite`, `filter`, and `filters`. // Additionally, aggregations only run over the same subset of fields that query // works with. 
- Aggregations map[string]types.APIKeyAggregationContainer `json:"aggregations,omitempty"` + Aggregations map[string]types.ApiKeyAggregationContainer `json:"aggregations,omitempty"` // From Starting document offset. // By default, you cannot page through more than 10,000 hits using the from and // size parameters. @@ -59,7 +59,7 @@ type Request struct { // `id`, `type`, `name`, // `creation`, `expiration`, `invalidated`, `invalidation`, `username`, `realm`, // and `metadata`. - Query *types.APIKeyQueryContainer `json:"query,omitempty"` + Query *types.ApiKeyQueryContainer `json:"query,omitempty"` // SearchAfter Search after definition SearchAfter []types.FieldValue `json:"search_after,omitempty"` // Size The number of hits to return. @@ -76,8 +76,9 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{ - Aggregations: make(map[string]types.APIKeyAggregationContainer, 0), + Aggregations: make(map[string]types.ApiKeyAggregationContainer, 0), } + return r } @@ -109,7 +110,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "aggregations", "aggs": if s.Aggregations == nil { - s.Aggregations = make(map[string]types.APIKeyAggregationContainer, 0) + s.Aggregations = make(map[string]types.ApiKeyAggregationContainer, 0) } if err := dec.Decode(&s.Aggregations); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -117,7 +118,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "from": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -143,7 +144,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/security/queryapikeys/response.go b/typedapi/security/queryapikeys/response.go index 4309737ca3..8a6c0c8254 100644 --- a/typedapi/security/queryapikeys/response.go +++ b/typedapi/security/queryapikeys/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package queryapikeys @@ -34,11 +34,11 @@ import ( // Response holds the response body struct for the package queryapikeys // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/query_api_keys/QueryApiKeysResponse.ts#L26-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/query_api_keys/QueryApiKeysResponse.ts#L26-L45 type Response struct { // Aggregations The aggregations result, if requested. - Aggregations map[string]types.APIKeyAggregate `json:"aggregations,omitempty"` + Aggregations map[string]types.ApiKeyAggregate `json:"aggregations,omitempty"` // ApiKeys A list of API key information. ApiKeys []types.ApiKey `json:"api_keys"` // Count The number of API keys returned in the response. 
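Editor's sketch for the query_api_keys hunks: a paginated listing that also requests the owner's profile uid and the limited-by role descriptors, using only setters visible in this patch (es is an assumed *elasticsearch.TypedClient). Note that Do now forces typed_keys=true, so any aggregation names in the response come back prefixed with their type.

func listApiKeys(ctx context.Context, es *elasticsearch.TypedClient) error {
	resp, err := es.Security.QueryApiKeys().
		From(0).
		WithLimitedBy(true).
		WithProfileUid(true).
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("page holds %d of %d keys\n", resp.Count, resp.Total)
	return nil
}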
@@ -50,7 +50,7 @@ type Response struct { // NewResponse returns a Response func NewResponse() *Response { r := &Response{ - Aggregations: make(map[string]types.APIKeyAggregate, 0), + Aggregations: make(map[string]types.ApiKeyAggregate, 0), } return r } @@ -71,7 +71,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "aggregations": if s.Aggregations == nil { - s.Aggregations = make(map[string]types.APIKeyAggregate, 0) + s.Aggregations = make(map[string]types.ApiKeyAggregate, 0) } for dec.More() { @@ -87,7 +87,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { elems := strings.Split(value, "#") if len(elems) == 2 { if s.Aggregations == nil { - s.Aggregations = make(map[string]types.APIKeyAggregate, 0) + s.Aggregations = make(map[string]types.ApiKeyAggregate, 0) } switch elems[0] { @@ -183,7 +183,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -193,7 +193,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -209,7 +209,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -225,7 +225,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/security/queryuser/query_user.go b/typedapi/security/queryuser/query_user.go new file mode 100644 index 0000000000..62435e6e77 --- /dev/null +++ b/typedapi/security/queryuser/query_user.go @@ -0,0 +1,233 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Retrieves information for Users using a subset of query DSL +package queryuser + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. 
+var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type QueryUser struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewQueryUser type alias for index. +type NewQueryUser func() *QueryUser + +// NewQueryUserFunc returns a new instance of QueryUser with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewQueryUserFunc(tp elastictransport.Interface) NewQueryUser { + return func() *QueryUser { + n := New(tp) + + return n + } +} + +// Retrieves information for Users using a subset of query DSL +// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-query-user.html +func New(tp elastictransport.Interface) *QueryUser { + r := &QueryUser{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *QueryUser) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_security") + path.WriteString("/") + path.WriteString("_query") + path.WriteString("/") + path.WriteString("user") + + method = http.MethodPost + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Content-Type") == "" { + if r.raw != nil { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
+func (r QueryUser) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "security.query_user") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "security.query_user") + if reader := instrument.RecordRequestBody(ctx, "security.query_user", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "security.query_user") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the QueryUser query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a queryuser.Response +func (r QueryUser) Do(ctx context.Context) (bool, error) { + return r.IsSuccess(ctx) +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r QueryUser) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "security.query_user") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the QueryUser query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the QueryUser headers map. +func (r *QueryUser) Header(key, value string) *QueryUser { + r.headers.Set(key, value) + + return r +} diff --git a/typedapi/security/samlauthenticate/request.go b/typedapi/security/samlauthenticate/request.go index 643c7081bd..8cfa344d98 100644 --- a/typedapi/security/samlauthenticate/request.go +++ b/typedapi/security/samlauthenticate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
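Editor's note on the new queryuser package above: as generated here it carries no typed request or response yet, so Do simply reports whether POST /_security/_query/user succeeded. A hedged sketch, assuming the usual Security namespace accessor on an *elasticsearch.TypedClient named es.

func queryUsersAvailable(ctx context.Context, es *elasticsearch.TypedClient) (bool, error) {
	// Do delegates to IsSuccess, so the only result is a success flag.
	return es.Security.QueryUser().Do(ctx)
}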
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package samlauthenticate @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package samlauthenticate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/saml_authenticate/Request.ts#L23-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/saml_authenticate/Request.ts#L23-L38 type Request struct { // Content The SAML response as it was sent by the user’s browser, usually a Base64 @@ -48,6 +48,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/security/samlauthenticate/response.go b/typedapi/security/samlauthenticate/response.go index 72e74d6a73..1e1ed9c08d 100644 --- a/typedapi/security/samlauthenticate/response.go +++ b/typedapi/security/samlauthenticate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package samlauthenticate // Response holds the response body struct for the package samlauthenticate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/saml_authenticate/Response.ts#L22-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/saml_authenticate/Response.ts#L22-L30 type Response struct { AccessToken string `json:"access_token"` ExpiresIn int `json:"expires_in"` diff --git a/typedapi/security/samlauthenticate/saml_authenticate.go b/typedapi/security/samlauthenticate/saml_authenticate.go index 9fb151559a..125eab58e4 100644 --- a/typedapi/security/samlauthenticate/saml_authenticate.go +++ b/typedapi/security/samlauthenticate/saml_authenticate.go @@ -16,10 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Exchanges a SAML Response message for an Elasticsearch access token and -// refresh token pair +// Submits a SAML Response message to Elasticsearch for consumption. package samlauthenticate import ( @@ -31,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -73,8 +73,7 @@ func NewSamlAuthenticateFunc(tp elastictransport.Interface) NewSamlAuthenticate } } -// Exchanges a SAML Response message for an Elasticsearch access token and -// refresh token pair +// Submits a SAML Response message to Elasticsearch for consumption. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-authenticate.html func New(tp elastictransport.Interface) *SamlAuthenticate { @@ -295,6 +294,50 @@ func (r *SamlAuthenticate) Header(key, value string) *SamlAuthenticate { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *SamlAuthenticate) ErrorTrace(errortrace bool) *SamlAuthenticate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SamlAuthenticate) FilterPath(filterpaths ...string) *SamlAuthenticate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SamlAuthenticate) Human(human bool) *SamlAuthenticate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *SamlAuthenticate) Pretty(pretty bool) *SamlAuthenticate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Content The SAML response as it was sent by the user’s browser, usually a Base64 // encoded XML document. // API name: content diff --git a/typedapi/security/samlcompletelogout/request.go b/typedapi/security/samlcompletelogout/request.go index cd3fd175b0..8216a03887 100644 --- a/typedapi/security/samlcompletelogout/request.go +++ b/typedapi/security/samlcompletelogout/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package samlcompletelogout @@ -31,7 +31,7 @@ import ( // Request holds the request body struct for the package samlcompletelogout // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/saml_complete_logout/Request.ts#L23-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/saml_complete_logout/Request.ts#L23-L40 type Request struct { // Content If the SAML IdP sends the logout response with the HTTP-Post binding, this @@ -52,6 +52,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/security/samlcompletelogout/saml_complete_logout.go b/typedapi/security/samlcompletelogout/saml_complete_logout.go index 34742ea531..8900433266 100644 --- a/typedapi/security/samlcompletelogout/saml_complete_logout.go +++ b/typedapi/security/samlcompletelogout/saml_complete_logout.go @@ -16,9 +16,9 @@ // under the License. 
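Editor's sketch for the saml_authenticate hunks: exchanging a Base64-encoded SAML response for an Elasticsearch token pair. The Content setter is assumed to follow the generated pattern documented above, es is an assumed *elasticsearch.TypedClient, and samlResponse stands in for the hypothetical Base64 payload posted by the user's browser.

func exchangeSamlResponse(ctx context.Context, es *elasticsearch.TypedClient, samlResponse string) (string, error) {
	resp, err := es.Security.SamlAuthenticate().
		Content(samlResponse). // assumed setter for the content field
		Do(ctx)
	if err != nil {
		return "", err
	}
	// resp.AccessToken expires after resp.ExpiresIn seconds.
	return resp.AccessToken, nil
}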
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Verifies the logout response sent from the SAML IdP +// Verifies the logout response sent from the SAML IdP. package samlcompletelogout import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -71,7 +72,7 @@ func NewSamlCompleteLogoutFunc(tp elastictransport.Interface) NewSamlCompleteLog } } -// Verifies the logout response sent from the SAML IdP +// Verifies the logout response sent from the SAML IdP. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-complete-logout.html func New(tp elastictransport.Interface) *SamlCompleteLogout { @@ -238,6 +239,50 @@ func (r *SamlCompleteLogout) Header(key, value string) *SamlCompleteLogout { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *SamlCompleteLogout) ErrorTrace(errortrace bool) *SamlCompleteLogout { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SamlCompleteLogout) FilterPath(filterpaths ...string) *SamlCompleteLogout { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SamlCompleteLogout) Human(human bool) *SamlCompleteLogout { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *SamlCompleteLogout) Pretty(pretty bool) *SamlCompleteLogout { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Content If the SAML IdP sends the logout response with the HTTP-Post binding, this // field must be set to the value of the SAMLResponse form parameter from the // logout response. diff --git a/typedapi/security/samlinvalidate/request.go b/typedapi/security/samlinvalidate/request.go index f54096e843..051e96f0a0 100644 --- a/typedapi/security/samlinvalidate/request.go +++ b/typedapi/security/samlinvalidate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package samlinvalidate @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package samlinvalidate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/saml_invalidate/Request.ts#L22-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/saml_invalidate/Request.ts#L22-L43 type Request struct { // Acs The Assertion Consumer Service URL that matches the one of the SAML realm in @@ -55,6 +55,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/security/samlinvalidate/response.go b/typedapi/security/samlinvalidate/response.go index 36954d0fdb..e15f44447a 100644 --- a/typedapi/security/samlinvalidate/response.go +++ b/typedapi/security/samlinvalidate/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package samlinvalidate // Response holds the response body struct for the package samlinvalidate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/saml_invalidate/Response.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/saml_invalidate/Response.ts#L22-L28 type Response struct { Invalidated int `json:"invalidated"` Realm string `json:"realm"` diff --git a/typedapi/security/samlinvalidate/saml_invalidate.go b/typedapi/security/samlinvalidate/saml_invalidate.go index a9494fa992..47a0072a5a 100644 --- a/typedapi/security/samlinvalidate/saml_invalidate.go +++ b/typedapi/security/samlinvalidate/saml_invalidate.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Consumes a SAML LogoutRequest +// Submits a SAML LogoutRequest message to Elasticsearch for consumption. package samlinvalidate import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -72,7 +73,7 @@ func NewSamlInvalidateFunc(tp elastictransport.Interface) NewSamlInvalidate { } } -// Consumes a SAML LogoutRequest +// Submits a SAML LogoutRequest message to Elasticsearch for consumption. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-invalidate.html func New(tp elastictransport.Interface) *SamlInvalidate { @@ -293,6 +294,50 @@ func (r *SamlInvalidate) Header(key, value string) *SamlInvalidate { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *SamlInvalidate) ErrorTrace(errortrace bool) *SamlInvalidate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SamlInvalidate) FilterPath(filterpaths ...string) *SamlInvalidate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SamlInvalidate) Human(human bool) *SamlInvalidate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *SamlInvalidate) Pretty(pretty bool) *SamlInvalidate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Acs The Assertion Consumer Service URL that matches the one of the SAML realm in // Elasticsearch that should be used. You must specify either this parameter or // the realm parameter. diff --git a/typedapi/security/samllogout/request.go b/typedapi/security/samllogout/request.go index 585818bec7..13e137d79e 100644 --- a/typedapi/security/samllogout/request.go +++ b/typedapi/security/samllogout/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package samllogout @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package samllogout // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/saml_logout/Request.ts#L22-L41 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/saml_logout/Request.ts#L22-L41 type Request struct { // RefreshToken The refresh token that was returned as a response to calling the SAML @@ -45,6 +45,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/security/samllogout/response.go b/typedapi/security/samllogout/response.go index 8feb99c485..96fe049000 100644 --- a/typedapi/security/samllogout/response.go +++ b/typedapi/security/samllogout/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package samllogout // Response holds the response body struct for the package samllogout // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/saml_logout/Response.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/saml_logout/Response.ts#L20-L24 type Response struct { Redirect string `json:"redirect"` } diff --git a/typedapi/security/samllogout/saml_logout.go b/typedapi/security/samllogout/saml_logout.go index fa395b839d..b5822bb377 100644 --- a/typedapi/security/samllogout/saml_logout.go +++ b/typedapi/security/samllogout/saml_logout.go @@ -16,10 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Invalidates an access token and a refresh token that were generated via the -// SAML Authenticate API +// Submits a request to invalidate an access token and refresh token. package samllogout import ( @@ -31,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -73,8 +73,7 @@ func NewSamlLogoutFunc(tp elastictransport.Interface) NewSamlLogout { } } -// Invalidates an access token and a refresh token that were generated via the -// SAML Authenticate API +// Submits a request to invalidate an access token and refresh token. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-logout.html func New(tp elastictransport.Interface) *SamlLogout { @@ -295,6 +294,50 @@ func (r *SamlLogout) Header(key, value string) *SamlLogout { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *SamlLogout) ErrorTrace(errortrace bool) *SamlLogout { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SamlLogout) FilterPath(filterpaths ...string) *SamlLogout { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SamlLogout) Human(human bool) *SamlLogout { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *SamlLogout) Pretty(pretty bool) *SamlLogout { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // RefreshToken The refresh token that was returned as a response to calling the SAML // authenticate API. // Alternatively, the most recent refresh token that was received after diff --git a/typedapi/security/samlprepareauthentication/request.go b/typedapi/security/samlprepareauthentication/request.go index c510574811..cca4194626 100644 --- a/typedapi/security/samlprepareauthentication/request.go +++ b/typedapi/security/samlprepareauthentication/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package samlprepareauthentication @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package samlprepareauthentication // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/saml_prepare_authentication/Request.ts#L22-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/saml_prepare_authentication/Request.ts#L22-L46 type Request struct { // Acs The Assertion Consumer Service URL that matches the one of the SAML realms in @@ -49,6 +49,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/security/samlprepareauthentication/response.go b/typedapi/security/samlprepareauthentication/response.go index 4e179ebb84..262d91546a 100644 --- a/typedapi/security/samlprepareauthentication/response.go +++ b/typedapi/security/samlprepareauthentication/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package samlprepareauthentication // Response holds the response body struct for the package samlprepareauthentication // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/saml_prepare_authentication/Response.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/saml_prepare_authentication/Response.ts#L22-L28 type Response struct { Id string `json:"id"` Realm string `json:"realm"` diff --git a/typedapi/security/samlprepareauthentication/saml_prepare_authentication.go b/typedapi/security/samlprepareauthentication/saml_prepare_authentication.go index d2c6d2fd07..277f37cebc 100644 --- a/typedapi/security/samlprepareauthentication/saml_prepare_authentication.go +++ b/typedapi/security/samlprepareauthentication/saml_prepare_authentication.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates a SAML authentication request +// Creates a SAML authentication request () as a URL string, based +// on the configuration of the respective SAML realm in Elasticsearch. package samlprepareauthentication import ( @@ -30,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -72,7 +74,8 @@ func NewSamlPrepareAuthenticationFunc(tp elastictransport.Interface) NewSamlPrep } } -// Creates a SAML authentication request +// Creates a SAML authentication request () as a URL string, based +// on the configuration of the respective SAML realm in Elasticsearch. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-prepare-authentication.html func New(tp elastictransport.Interface) *SamlPrepareAuthentication { @@ -293,6 +296,50 @@ func (r *SamlPrepareAuthentication) Header(key, value string) *SamlPrepareAuthen return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *SamlPrepareAuthentication) ErrorTrace(errortrace bool) *SamlPrepareAuthentication { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SamlPrepareAuthentication) FilterPath(filterpaths ...string) *SamlPrepareAuthentication { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SamlPrepareAuthentication) Human(human bool) *SamlPrepareAuthentication { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *SamlPrepareAuthentication) Pretty(pretty bool) *SamlPrepareAuthentication { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Acs The Assertion Consumer Service URL that matches the one of the SAML realms in // Elasticsearch. // The realm is used to generate the authentication request. You must specify diff --git a/typedapi/security/samlserviceprovidermetadata/response.go b/typedapi/security/samlserviceprovidermetadata/response.go index 8e1e82220a..ba3945dd3c 100644 --- a/typedapi/security/samlserviceprovidermetadata/response.go +++ b/typedapi/security/samlserviceprovidermetadata/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
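The reworded description above says the prepare-authentication endpoint generates a SAML authentication request as a URL string based on the configuration of the respective SAML realm. A rough usage sketch follows; it is illustrative only and assumes a typed client created with elasticsearch.NewTypedClient, that the generated builder exposes a Realm body setter in the same style as the Acs setter shown in this hunk, and that the response carries a Redirect field alongside the Id and Realm fields visible above. The realm name "saml1" is a placeholder.

// Illustrative sketch: start an SP-initiated SAML flow with the typed client.
// "saml1" is a placeholder realm name; error handling is kept minimal.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"https://localhost:9200"},
	})
	if err != nil {
		log.Fatal(err)
	}

	// Ask Elasticsearch to build the authentication request for the "saml1" realm.
	res, err := es.Security.SamlPrepareAuthentication().
		Realm("saml1"). // assumed body setter, same builder convention as Acs above
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}

	// res.Id identifies the request; res.Redirect (assumed field) is the IdP URL
	// the user's browser should be sent to.
	fmt.Println(res.Id, res.Realm, res.Redirect)
}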
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package samlserviceprovidermetadata // Response holds the response body struct for the package samlserviceprovidermetadata // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/saml_service_provider_metadata/Response.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/saml_service_provider_metadata/Response.ts#L20-L24 type Response struct { Metadata string `json:"metadata"` } diff --git a/typedapi/security/samlserviceprovidermetadata/saml_service_provider_metadata.go b/typedapi/security/samlserviceprovidermetadata/saml_service_provider_metadata.go index 53f3e98d49..20a3ba9be4 100644 --- a/typedapi/security/samlserviceprovidermetadata/saml_service_provider_metadata.go +++ b/typedapi/security/samlserviceprovidermetadata/saml_service_provider_metadata.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Generates SAML metadata for the Elastic stack SAML 2.0 Service Provider +// Generate SAML metadata for a SAML 2.0 Service Provider. package samlserviceprovidermetadata import ( @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +76,7 @@ func NewSamlServiceProviderMetadataFunc(tp elastictransport.Interface) NewSamlSe } } -// Generates SAML metadata for the Elastic stack SAML 2.0 Service Provider +// Generate SAML metadata for a SAML 2.0 Service Provider. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-saml-sp-metadata.html func New(tp elastictransport.Interface) *SamlServiceProviderMetadata { @@ -270,7 +270,7 @@ func (r SamlServiceProviderMetadata) IsSuccess(providedCtx context.Context) (boo if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -306,3 +306,47 @@ func (r *SamlServiceProviderMetadata) _realmname(realmname string) *SamlServiceP return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *SamlServiceProviderMetadata) ErrorTrace(errortrace bool) *SamlServiceProviderMetadata { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SamlServiceProviderMetadata) FilterPath(filterpaths ...string) *SamlServiceProviderMetadata { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. 
When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SamlServiceProviderMetadata) Human(human bool) *SamlServiceProviderMetadata { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *SamlServiceProviderMetadata) Pretty(pretty bool) *SamlServiceProviderMetadata { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/security/suggestuserprofiles/request.go b/typedapi/security/suggestuserprofiles/request.go index 02cbcd36eb..e884fd5116 100644 --- a/typedapi/security/suggestuserprofiles/request.go +++ b/typedapi/security/suggestuserprofiles/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package suggestuserprofiles @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package suggestuserprofiles // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/suggest_user_profiles/Request.ts#L24-L66 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/suggest_user_profiles/Request.ts#L24-L66 type Request struct { // Data List of filters for the `data` field of the profile document. @@ -56,6 +56,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -119,7 +120,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Name = &o case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/security/suggestuserprofiles/response.go b/typedapi/security/suggestuserprofiles/response.go index 2d702dd536..a7d84e24aa 100644 --- a/typedapi/security/suggestuserprofiles/response.go +++ b/typedapi/security/suggestuserprofiles/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
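The `var tmp interface{}` to `var tmp any` change in the UnmarshalJSON hunk above is purely cosmetic: `any` has been an alias for `interface{}` since Go 1.18, so the decode-and-switch pattern behaves identically. A minimal standalone illustration of that pattern, mirroring how the generated code handles the `size` field arriving as either a string or a number:

// Minimal illustration of the decode-into-any pattern used by the generated
// UnmarshalJSON: "any" is an alias for "interface{}", so behaviour is unchanged.
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	var tmp any // identical to: var tmp interface{}
	_ = json.Unmarshal([]byte(`10`), &tmp)

	switch v := tmp.(type) {
	case string:
		fmt.Println("size as string:", v)
	case float64: // encoding/json decodes JSON numbers into float64 by default
		fmt.Println("size as number:", int(v))
	}
}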
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package suggestuserprofiles @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package suggestuserprofiles // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/suggest_user_profiles/Response.ts#L29-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/suggest_user_profiles/Response.ts#L29-L35 type Response struct { Profiles []types.UserProfile `json:"profiles"` Took int64 `json:"took"` diff --git a/typedapi/security/suggestuserprofiles/suggest_user_profiles.go b/typedapi/security/suggestuserprofiles/suggest_user_profiles.go index 86b54cacff..6785c8e382 100644 --- a/typedapi/security/suggestuserprofiles/suggest_user_profiles.go +++ b/typedapi/security/suggestuserprofiles/suggest_user_profiles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Get suggestions for user profiles that match specified search criteria. package suggestuserprofiles @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -293,6 +294,50 @@ func (r *SuggestUserProfiles) Header(key, value string) *SuggestUserProfiles { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *SuggestUserProfiles) ErrorTrace(errortrace bool) *SuggestUserProfiles { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *SuggestUserProfiles) FilterPath(filterpaths ...string) *SuggestUserProfiles { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *SuggestUserProfiles) Human(human bool) *SuggestUserProfiles { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *SuggestUserProfiles) Pretty(pretty bool) *SuggestUserProfiles { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Data List of filters for the `data` field of the profile document. // To return all content use `data=*`. To return a subset of content // use `data=` to retrieve content nested under the specified ``. 
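Every endpoint touched in this changeset gains the same four request-level helpers: ErrorTrace, FilterPath, Human, and Pretty, each of which just writes a query-string value via strconv.FormatBool or strings.Join. A hedged usage sketch using SuggestUserProfiles, whose generated code appears directly above; the client construction is an assumption and the filter paths are placeholders:

// Sketch: chaining the new common query parameters on a typed request.
// Assumes a typed client; filter_path values below are placeholders.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"https://localhost:9200"},
	})
	if err != nil {
		log.Fatal(err)
	}

	res, err := es.Security.SuggestUserProfiles().
		// New in this change: query-string helpers shared by all endpoints.
		FilterPath("profiles.uid", "took"). // becomes ?filter_path=profiles.uid,took
		ErrorTrace(true).                   // ?error_trace=true
		Human(true).                        // ?human=true
		Pretty(true).                       // ?pretty=true
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(res.Took, len(res.Profiles))
}

The helpers only touch r.values, so they compose freely with the existing body setters and with Request(...).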
diff --git a/typedapi/security/updateapikey/request.go b/typedapi/security/updateapikey/request.go index fc40a6e63f..0908796a54 100644 --- a/typedapi/security/updateapikey/request.go +++ b/typedapi/security/updateapikey/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updateapikey @@ -32,7 +32,7 @@ import ( // Request holds the request body struct for the package updateapikey // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/update_api_key/Request.ts#L26-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/update_api_key/Request.ts#L26-L65 type Request struct { // Expiration Expiration time for the API key. @@ -57,6 +57,7 @@ func NewRequest() *Request { r := &Request{ RoleDescriptors: make(map[string]types.RoleDescriptor, 0), } + return r } diff --git a/typedapi/security/updateapikey/response.go b/typedapi/security/updateapikey/response.go index 4bca52d77d..6808985ec3 100644 --- a/typedapi/security/updateapikey/response.go +++ b/typedapi/security/updateapikey/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updateapikey // Response holds the response body struct for the package updateapikey // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/update_api_key/Response.ts#L20-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/update_api_key/Response.ts#L20-L28 type Response struct { // Updated If `true`, the API key was updated. diff --git a/typedapi/security/updateapikey/update_api_key.go b/typedapi/security/updateapikey/update_api_key.go index 76e3925cce..d600ff3bbb 100644 --- a/typedapi/security/updateapikey/update_api_key.go +++ b/typedapi/security/updateapikey/update_api_key.go @@ -16,9 +16,32 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Updates attributes of an existing API key. +// Users can only update API keys that they created or that were granted to +// them. +// Use this API to update API keys created by the create API Key or grant API +// Key APIs. +// If you need to apply the same update to many API keys, you can use bulk +// update API Keys to reduce overhead. +// It’s not possible to update expired API keys, or API keys that have been +// invalidated by invalidate API Key. +// This API supports updates to an API key’s access scope and metadata. +// The access scope of an API key is derived from the `role_descriptors` you +// specify in the request, and a snapshot of the owner user’s permissions at the +// time of the request. 
+// The snapshot of the owner’s permissions is updated automatically on every +// call. +// If you don’t specify `role_descriptors` in the request, a call to this API +// might still change the API key’s access scope. +// This change can occur if the owner user’s permissions have changed since the +// API key was created or last modified. +// To update another user’s API key, use the `run_as` feature to submit a +// request on behalf of another user. +// IMPORTANT: It’s not possible to use an API key as the authentication +// credential for this API. +// To update an API key, the owner user’s credentials are required. package updateapikey import ( @@ -30,6 +53,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -81,6 +105,29 @@ func NewUpdateApiKeyFunc(tp elastictransport.Interface) NewUpdateApiKey { } // Updates attributes of an existing API key. +// Users can only update API keys that they created or that were granted to +// them. +// Use this API to update API keys created by the create API Key or grant API +// Key APIs. +// If you need to apply the same update to many API keys, you can use bulk +// update API Keys to reduce overhead. +// It’s not possible to update expired API keys, or API keys that have been +// invalidated by invalidate API Key. +// This API supports updates to an API key’s access scope and metadata. +// The access scope of an API key is derived from the `role_descriptors` you +// specify in the request, and a snapshot of the owner user’s permissions at the +// time of the request. +// The snapshot of the owner’s permissions is updated automatically on every +// call. +// If you don’t specify `role_descriptors` in the request, a call to this API +// might still change the API key’s access scope. +// This change can occur if the owner user’s permissions have changed since the +// API key was created or last modified. +// To update another user’s API key, use the `run_as` feature to submit a +// request on behalf of another user. +// IMPORTANT: It’s not possible to use an API key as the authentication +// credential for this API. +// To update an API key, the owner user’s credentials are required. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-update-api-key.html func New(tp elastictransport.Interface) *UpdateApiKey { @@ -314,6 +361,50 @@ func (r *UpdateApiKey) _id(id string) *UpdateApiKey { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpdateApiKey) ErrorTrace(errortrace bool) *UpdateApiKey { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *UpdateApiKey) FilterPath(filterpaths ...string) *UpdateApiKey { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *UpdateApiKey) Human(human bool) *UpdateApiKey { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpdateApiKey) Pretty(pretty bool) *UpdateApiKey { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Expiration Expiration time for the API key. // API name: expiration func (r *UpdateApiKey) Expiration(duration types.Duration) *UpdateApiKey { diff --git a/typedapi/security/updatesettings/update_settings.go b/typedapi/security/updatesettings/update_settings.go index a357136852..1f1f210b43 100644 --- a/typedapi/security/updatesettings/update_settings.go +++ b/typedapi/security/updatesettings/update_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Update settings for the security system index package updatesettings @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strings" @@ -203,7 +202,7 @@ func (r UpdateSettings) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err diff --git a/typedapi/security/updateuserprofiledata/request.go b/typedapi/security/updateuserprofiledata/request.go index 7ba804c26b..b7308513d7 100644 --- a/typedapi/security/updateuserprofiledata/request.go +++ b/typedapi/security/updateuserprofiledata/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updateuserprofiledata @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package updateuserprofiledata // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/update_user_profile_data/Request.ts#L27-L70 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/update_user_profile_data/Request.ts#L27-L70 type Request struct { // Data Non-searchable data that you want to associate with the user profile. @@ -44,6 +44,7 @@ func NewRequest() *Request { Data: make(map[string]json.RawMessage, 0), Labels: make(map[string]json.RawMessage, 0), } + return r } diff --git a/typedapi/security/updateuserprofiledata/response.go b/typedapi/security/updateuserprofiledata/response.go index ee3dbf162d..2256d84fa7 100644 --- a/typedapi/security/updateuserprofiledata/response.go +++ b/typedapi/security/updateuserprofiledata/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
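The expanded package documentation above spells out when an update changes an API key's access scope. As a rough sketch of how the typed endpoint might be called after this change: the key ID is a placeholder, the sketch assumes types.Duration accepts a plain string such as "30d", and role_descriptors are omitted, in which case the key's scope is re-snapshotted from the owner's current permissions as described in the doc comment.

// Sketch: updating an existing API key's expiration with the typed client.
// "example-api-key-id" is a placeholder; omitting role_descriptors means the
// scope is refreshed from the owner's current permissions.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"https://localhost:9200"},
	})
	if err != nil {
		log.Fatal(err)
	}

	res, err := es.Security.UpdateApiKey("example-api-key-id").
		Expiration("30d"). // body setter shown in this hunk
		Pretty(true).      // new query-string helper added by this change
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("updated:", res.Updated)
}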
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updateuserprofiledata // Response holds the response body struct for the package updateuserprofiledata // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/update_user_profile_data/Response.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/update_user_profile_data/Response.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/security/updateuserprofiledata/update_user_profile_data.go b/typedapi/security/updateuserprofiledata/update_user_profile_data.go index bca3b5aad8..d147fdd8f4 100644 --- a/typedapi/security/updateuserprofiledata/update_user_profile_data.go +++ b/typedapi/security/updateuserprofiledata/update_user_profile_data.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Update application specific data for the user profile of the given unique ID. +// Updates specific data for the user profile that's associated with the +// specified unique ID. package updateuserprofiledata import ( @@ -30,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -81,7 +83,8 @@ func NewUpdateUserProfileDataFunc(tp elastictransport.Interface) NewUpdateUserPr } } -// Update application specific data for the user profile of the given unique ID. +// Updates specific data for the user profile that's associated with the +// specified unique ID. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-update-user-profile-data.html func New(tp elastictransport.Interface) *UpdateUserProfileData { @@ -344,6 +347,50 @@ func (r *UpdateUserProfileData) Refresh(refresh refresh.Refresh) *UpdateUserProf return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpdateUserProfileData) ErrorTrace(errortrace bool) *UpdateUserProfileData { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *UpdateUserProfileData) FilterPath(filterpaths ...string) *UpdateUserProfileData { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *UpdateUserProfileData) Human(human bool) *UpdateUserProfileData { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpdateUserProfileData) Pretty(pretty bool) *UpdateUserProfileData { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Data Non-searchable data that you want to associate with the user profile. // This field supports a nested data structure. // API name: data diff --git a/typedapi/shutdown/deletenode/delete_node.go b/typedapi/shutdown/deletenode/delete_node.go index 5bcc7a1653..ec3cfb326d 100644 --- a/typedapi/shutdown/deletenode/delete_node.go +++ b/typedapi/shutdown/deletenode/delete_node.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Removes a node from the shutdown list. Designed for indirect use by ECE/ESS // and ECK. Direct use is not supported. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -271,7 +271,7 @@ func (r DeleteNode) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -325,3 +325,47 @@ func (r *DeleteNode) Timeout(timeout timeunit.TimeUnit) *DeleteNode { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteNode) ErrorTrace(errortrace bool) *DeleteNode { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteNode) FilterPath(filterpaths ...string) *DeleteNode { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteNode) Human(human bool) *DeleteNode { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteNode) Pretty(pretty bool) *DeleteNode { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/shutdown/deletenode/response.go b/typedapi/shutdown/deletenode/response.go index 7b2a0c08a8..a0cf05e998 100644 --- a/typedapi/shutdown/deletenode/response.go +++ b/typedapi/shutdown/deletenode/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
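For the update_user_profile_data changes above, the request body is the pair of Data/Labels maps that NewRequest pre-allocates. A hedged sketch of building that request with the typed client; the uid and the "app1" namespace are placeholders, and the client construction is an assumption:

// Sketch: attaching data to a user profile with the typed client.
// "u_example_0" and the "app1" application namespace are placeholders.
package main

import (
	"context"
	"encoding/json"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
	"github.com/elastic/go-elasticsearch/v8/typedapi/security/updateuserprofiledata"
)

func main() {
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"https://localhost:9200"},
	})
	if err != nil {
		log.Fatal(err)
	}

	req := updateuserprofiledata.NewRequest()
	// Labels are searchable; Data is not. Both are keyed by application name.
	req.Labels["app1"] = json.RawMessage(`{"direction": "north"}`)
	req.Data["app1"] = json.RawMessage(`{"theme": "default"}`)

	res, err := es.Security.UpdateUserProfileData("u_example_0").
		Request(req).
		Pretty(true). // new query-string helper from this change
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	log.Println("acknowledged:", res.Acknowledged)
}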
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletenode // Response holds the response body struct for the package deletenode // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/shutdown/delete_node/ShutdownDeleteNodeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/shutdown/delete_node/ShutdownDeleteNodeResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/shutdown/getnode/get_node.go b/typedapi/shutdown/getnode/get_node.go index 2f6a2acd61..60f4a55d7b 100644 --- a/typedapi/shutdown/getnode/get_node.go +++ b/typedapi/shutdown/getnode/get_node.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieve status of a node or nodes that are currently marked as shutting // down. Designed for indirect use by ECE/ESS and ECK. Direct use is not @@ -29,9 +29,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -278,7 +278,7 @@ func (r GetNode) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -332,3 +332,47 @@ func (r *GetNode) Timeout(timeout timeunit.TimeUnit) *GetNode { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetNode) ErrorTrace(errortrace bool) *GetNode { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetNode) FilterPath(filterpaths ...string) *GetNode { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetNode) Human(human bool) *GetNode { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *GetNode) Pretty(pretty bool) *GetNode { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/shutdown/getnode/response.go b/typedapi/shutdown/getnode/response.go index 3dc0e54102..d18c196cd3 100644 --- a/typedapi/shutdown/getnode/response.go +++ b/typedapi/shutdown/getnode/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getnode @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getnode // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L23-L27 type Response struct { Nodes []types.NodeShutdownStatus `json:"nodes"` } diff --git a/typedapi/shutdown/putnode/put_node.go b/typedapi/shutdown/putnode/put_node.go index e635b508ec..956c4bdbfe 100644 --- a/typedapi/shutdown/putnode/put_node.go +++ b/typedapi/shutdown/putnode/put_node.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Adds a node to be shut down. Designed for indirect use by ECE/ESS and ECK. // Direct use is not supported. @@ -31,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -336,6 +337,50 @@ func (r *PutNode) Timeout(timeout timeunit.TimeUnit) *PutNode { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutNode) ErrorTrace(errortrace bool) *PutNode { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutNode) FilterPath(filterpaths ...string) *PutNode { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutNode) Human(human bool) *PutNode { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutNode) Pretty(pretty bool) *PutNode { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // AllocationDelay Only valid if type is restart. 
// Controls how long Elasticsearch will wait for the node to restart and join // the cluster before reassigning its shards to other nodes. diff --git a/typedapi/shutdown/putnode/request.go b/typedapi/shutdown/putnode/request.go index 93ecd41b4c..48f2b10688 100644 --- a/typedapi/shutdown/putnode/request.go +++ b/typedapi/shutdown/putnode/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putnode @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putnode // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/shutdown/put_node/ShutdownPutNodeRequest.ts#L25-L76 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/shutdown/put_node/ShutdownPutNodeRequest.ts#L25-L76 type Request struct { // AllocationDelay Only valid if type is restart. @@ -69,6 +69,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/shutdown/putnode/response.go b/typedapi/shutdown/putnode/response.go index fd32fc7fd7..92e59df3f5 100644 --- a/typedapi/shutdown/putnode/response.go +++ b/typedapi/shutdown/putnode/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putnode // Response holds the response body struct for the package putnode // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/shutdown/put_node/ShutdownPutNodeResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/shutdown/put_node/ShutdownPutNodeResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/slm/deletelifecycle/delete_lifecycle.go b/typedapi/slm/deletelifecycle/delete_lifecycle.go index 4357ee8ce0..e57f9e19fb 100644 --- a/typedapi/slm/deletelifecycle/delete_lifecycle.go +++ b/typedapi/slm/deletelifecycle/delete_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes an existing snapshot lifecycle policy. 
package deletelifecycle @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -262,7 +262,7 @@ func (r DeleteLifecycle) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +298,47 @@ func (r *DeleteLifecycle) _policyid(policyid string) *DeleteLifecycle { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteLifecycle) ErrorTrace(errortrace bool) *DeleteLifecycle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteLifecycle) FilterPath(filterpaths ...string) *DeleteLifecycle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteLifecycle) Human(human bool) *DeleteLifecycle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteLifecycle) Pretty(pretty bool) *DeleteLifecycle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/slm/deletelifecycle/response.go b/typedapi/slm/deletelifecycle/response.go index f5fa92d65b..3631ba1359 100644 --- a/typedapi/slm/deletelifecycle/response.go +++ b/typedapi/slm/deletelifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletelifecycle // Response holds the response body struct for the package deletelifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/delete_lifecycle/DeleteSnapshotLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/delete_lifecycle/DeleteSnapshotLifecycleResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/slm/executelifecycle/execute_lifecycle.go b/typedapi/slm/executelifecycle/execute_lifecycle.go index e7b0b33a27..cf88fae83c 100644 --- a/typedapi/slm/executelifecycle/execute_lifecycle.go +++ b/typedapi/slm/executelifecycle/execute_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
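A recurring mechanical change in these files is dropping the deprecated io/ioutil import: since Go 1.16, ioutil.Discard is the same value as io.Discard, so IsSuccess can drain the response body with the standard library's io package alone. A minimal sketch of the drain-and-close idiom as it appears after the change:

// Minimal sketch of the drain-and-close idiom after the io/ioutil removal.
// Draining lets the underlying HTTP connection be reused before closing.
package esexample

import (
	"io"
	"net/http"
)

// drainBody discards any unread bytes and closes the body. io.Discard
// replaces the deprecated ioutil.Discard; they are the same writer.
func drainBody(res *http.Response) error {
	if _, err := io.Copy(io.Discard, res.Body); err != nil {
		return err
	}
	return res.Body.Close()
}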
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Immediately creates a snapshot according to the lifecycle policy, without // waiting for the scheduled time. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -266,7 +266,7 @@ func (r ExecuteLifecycle) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -302,3 +302,47 @@ func (r *ExecuteLifecycle) _policyid(policyid string) *ExecuteLifecycle { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ExecuteLifecycle) ErrorTrace(errortrace bool) *ExecuteLifecycle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ExecuteLifecycle) FilterPath(filterpaths ...string) *ExecuteLifecycle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ExecuteLifecycle) Human(human bool) *ExecuteLifecycle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ExecuteLifecycle) Pretty(pretty bool) *ExecuteLifecycle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/slm/executelifecycle/response.go b/typedapi/slm/executelifecycle/response.go index 0440fcaaea..7a7b5e9ee9 100644 --- a/typedapi/slm/executelifecycle/response.go +++ b/typedapi/slm/executelifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package executelifecycle // Response holds the response body struct for the package executelifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/execute_lifecycle/ExecuteSnapshotLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/execute_lifecycle/ExecuteSnapshotLifecycleResponse.ts#L22-L24 type Response struct { SnapshotName string `json:"snapshot_name"` } diff --git a/typedapi/slm/executeretention/execute_retention.go b/typedapi/slm/executeretention/execute_retention.go index 7b3dd2707b..18c9a92fc9 100644 --- a/typedapi/slm/executeretention/execute_retention.go +++ b/typedapi/slm/executeretention/execute_retention.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes any snapshots that are expired according to the policy's retention // rules. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -250,7 +250,7 @@ func (r ExecuteRetention) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -277,3 +277,47 @@ func (r *ExecuteRetention) Header(key, value string) *ExecuteRetention { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ExecuteRetention) ErrorTrace(errortrace bool) *ExecuteRetention { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ExecuteRetention) FilterPath(filterpaths ...string) *ExecuteRetention { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ExecuteRetention) Human(human bool) *ExecuteRetention { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *ExecuteRetention) Pretty(pretty bool) *ExecuteRetention { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/slm/executeretention/response.go b/typedapi/slm/executeretention/response.go index f46811f92d..4ce832d887 100644 --- a/typedapi/slm/executeretention/response.go +++ b/typedapi/slm/executeretention/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package executeretention // Response holds the response body struct for the package executeretention // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/execute_retention/ExecuteRetentionResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/execute_retention/ExecuteRetentionResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/slm/getlifecycle/get_lifecycle.go b/typedapi/slm/getlifecycle/get_lifecycle.go index 050c80d469..fe06b8089b 100644 --- a/typedapi/slm/getlifecycle/get_lifecycle.go +++ b/typedapi/slm/getlifecycle/get_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves one or more snapshot lifecycle policy definitions and information // about the latest snapshot attempts. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -269,7 +269,7 @@ func (r GetLifecycle) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -305,3 +305,47 @@ func (r *GetLifecycle) PolicyId(policyid string) *GetLifecycle { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetLifecycle) ErrorTrace(errortrace bool) *GetLifecycle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetLifecycle) FilterPath(filterpaths ...string) *GetLifecycle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *GetLifecycle) Human(human bool) *GetLifecycle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetLifecycle) Pretty(pretty bool) *GetLifecycle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/slm/getlifecycle/response.go b/typedapi/slm/getlifecycle/response.go index 5eea302d68..dd1023a5d1 100644 --- a/typedapi/slm/getlifecycle/response.go +++ b/typedapi/slm/getlifecycle/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getlifecycle @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/get_lifecycle/GetSnapshotLifecycleResponse.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/get_lifecycle/GetSnapshotLifecycleResponse.ts#L24-L27 type Response map[string]types.SnapshotLifecycle diff --git a/typedapi/slm/getstats/get_stats.go b/typedapi/slm/getstats/get_stats.go index 97a7dbb2b3..215a61e756 100644 --- a/typedapi/slm/getstats/get_stats.go +++ b/typedapi/slm/getstats/get_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns global and policy-level statistics about actions taken by snapshot // lifecycle management. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -250,7 +250,7 @@ func (r GetStats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -277,3 +277,47 @@ func (r *GetStats) Header(key, value string) *GetStats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetStats) ErrorTrace(errortrace bool) *GetStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetStats) FilterPath(filterpaths ...string) *GetStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. 
This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetStats) Human(human bool) *GetStats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetStats) Pretty(pretty bool) *GetStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/slm/getstats/response.go b/typedapi/slm/getstats/response.go index 9c4b80fe58..563b14f7c9 100644 --- a/typedapi/slm/getstats/response.go +++ b/typedapi/slm/getstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getstats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/get_stats/GetSnapshotLifecycleStatsResponse.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/get_stats/GetSnapshotLifecycleStatsResponse.ts#L23-L36 type Response struct { PolicyStats []string `json:"policy_stats"` RetentionDeletionTime types.Duration `json:"retention_deletion_time"` diff --git a/typedapi/slm/getstatus/get_status.go b/typedapi/slm/getstatus/get_status.go index d923d2814b..5b8fee5935 100644 --- a/typedapi/slm/getstatus/get_status.go +++ b/typedapi/slm/getstatus/get_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves the status of snapshot lifecycle management (SLM). package getstatus @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r GetStatus) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *GetStatus) Header(key, value string) *GetStatus { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetStatus) ErrorTrace(errortrace bool) *GetStatus { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetStatus) FilterPath(filterpaths ...string) *GetStatus { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetStatus) Human(human bool) *GetStatus { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetStatus) Pretty(pretty bool) *GetStatus { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/slm/getstatus/response.go b/typedapi/slm/getstatus/response.go index 52df8fdf60..90e3a4c210 100644 --- a/typedapi/slm/getstatus/response.go +++ b/typedapi/slm/getstatus/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getstatus @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getstatus // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/get_status/GetSnapshotLifecycleManagementStatusResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/get_status/GetSnapshotLifecycleManagementStatusResponse.ts#L22-L24 type Response struct { OperationMode lifecycleoperationmode.LifecycleOperationMode `json:"operation_mode"` } diff --git a/typedapi/slm/putlifecycle/put_lifecycle.go b/typedapi/slm/putlifecycle/put_lifecycle.go index c1ba24a72b..7e8cff3239 100644 --- a/typedapi/slm/putlifecycle/put_lifecycle.go +++ b/typedapi/slm/putlifecycle/put_lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates or updates a snapshot lifecycle policy. package putlifecycle @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -332,6 +333,50 @@ func (r *PutLifecycle) Timeout(duration string) *PutLifecycle { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutLifecycle) ErrorTrace(errortrace bool) *PutLifecycle { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutLifecycle) FilterPath(filterpaths ...string) *PutLifecycle { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. 
When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutLifecycle) Human(human bool) *PutLifecycle { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutLifecycle) Pretty(pretty bool) *PutLifecycle { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Config Configuration for each snapshot created by the policy. // API name: config func (r *PutLifecycle) Config(config *types.Configuration) *PutLifecycle { diff --git a/typedapi/slm/putlifecycle/request.go b/typedapi/slm/putlifecycle/request.go index 86ded6fcbe..8ed57c25d6 100644 --- a/typedapi/slm/putlifecycle/request.go +++ b/typedapi/slm/putlifecycle/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putlifecycle @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/put_lifecycle/PutSnapshotLifecycleRequest.ts#L26-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/put_lifecycle/PutSnapshotLifecycleRequest.ts#L26-L72 type Request struct { // Config Configuration for each snapshot created by the policy. @@ -56,6 +56,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/slm/putlifecycle/response.go b/typedapi/slm/putlifecycle/response.go index f3ae7e9218..674b78f65d 100644 --- a/typedapi/slm/putlifecycle/response.go +++ b/typedapi/slm/putlifecycle/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putlifecycle // Response holds the response body struct for the package putlifecycle // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/put_lifecycle/PutSnapshotLifecycleResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/put_lifecycle/PutSnapshotLifecycleResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/slm/start/response.go b/typedapi/slm/start/response.go index 62bf309b7a..45e1c1a9f7 100644 --- a/typedapi/slm/start/response.go +++ b/typedapi/slm/start/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
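The generated setters store their values in the request's url.Values: booleans are rendered with strconv.FormatBool, and the variadic FilterPath joins its arguments with commas before setting filter_path. A standalone sketch of that encoding, using the standard library only:

package main

import (
	"fmt"
	"net/url"
	"strconv"
	"strings"
)

func main() {
	// Mirror what the generated setters do with the request's url.Values.
	q := url.Values{}
	q.Set("filter_path", strings.Join([]string{"acknowledged", "error.reason"}, ","))
	q.Set("human", strconv.FormatBool(true))

	// Prints: filter_path=acknowledged%2Cerror.reason&human=true
	fmt.Println(q.Encode())
}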
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package start // Response holds the response body struct for the package start // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/start/StartSnapshotLifecycleManagementResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/start/StartSnapshotLifecycleManagementResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/slm/start/start.go b/typedapi/slm/start/start.go index 7dae95b738..3265444c0c 100644 --- a/typedapi/slm/start/start.go +++ b/typedapi/slm/start/start.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Turns on snapshot lifecycle management (SLM). package start @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r Start) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *Start) Header(key, value string) *Start { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Start) ErrorTrace(errortrace bool) *Start { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Start) FilterPath(filterpaths ...string) *Start { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Start) Human(human bool) *Start { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Start) Pretty(pretty bool) *Start { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/slm/stop/response.go b/typedapi/slm/stop/response.go index e6fc725010..8aafb782c1 100644 --- a/typedapi/slm/stop/response.go +++ b/typedapi/slm/stop/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stop // Response holds the response body struct for the package stop // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/stop/StopSnapshotLifecycleManagementResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/stop/StopSnapshotLifecycleManagementResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/slm/stop/stop.go b/typedapi/slm/stop/stop.go index d5efec98ea..5322516b82 100644 --- a/typedapi/slm/stop/stop.go +++ b/typedapi/slm/stop/stop.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Turns off snapshot lifecycle management (SLM). package stop @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r Stop) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *Stop) Header(key, value string) *Stop { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Stop) ErrorTrace(errortrace bool) *Stop { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Stop) FilterPath(filterpaths ...string) *Stop { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Stop) Human(human bool) *Stop { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Stop) Pretty(pretty bool) *Stop { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/snapshot/cleanuprepository/cleanup_repository.go b/typedapi/snapshot/cleanuprepository/cleanup_repository.go index 094586f45a..ccbb5d0f3e 100644 --- a/typedapi/snapshot/cleanuprepository/cleanup_repository.go +++ b/typedapi/snapshot/cleanuprepository/cleanup_repository.go @@ -16,9 +16,10 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Removes stale data from repository. +// Triggers the review of a snapshot repository’s contents and deletes any stale +// data not referenced by existing snapshots. package cleanuprepository import ( @@ -27,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +77,8 @@ func NewCleanupRepositoryFunc(tp elastictransport.Interface) NewCleanupRepositor } } -// Removes stale data from repository. +// Triggers the review of a snapshot repository’s contents and deletes any stale +// data not referenced by existing snapshots. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/clean-up-snapshot-repo-api.html func New(tp elastictransport.Interface) *CleanupRepository { @@ -262,7 +264,7 @@ func (r CleanupRepository) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -314,3 +316,47 @@ func (r *CleanupRepository) Timeout(duration string) *CleanupRepository { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *CleanupRepository) ErrorTrace(errortrace bool) *CleanupRepository { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *CleanupRepository) FilterPath(filterpaths ...string) *CleanupRepository { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *CleanupRepository) Human(human bool) *CleanupRepository { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *CleanupRepository) Pretty(pretty bool) *CleanupRepository { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/snapshot/cleanuprepository/response.go b/typedapi/snapshot/cleanuprepository/response.go index 4571ac2420..745f6ab687 100644 --- a/typedapi/snapshot/cleanuprepository/response.go +++ b/typedapi/snapshot/cleanuprepository/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
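Besides the new setters, CleanupRepository gains a more precise description: it triggers a review of the repository's contents and deletes stale data not referenced by any existing snapshot. A hedged usage sketch, assuming the typed client exposes it as es.Snapshot.CleanupRepository; the repository name and the 30s timeout are illustrative values:

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// cleanupRepo triggers a cleanup of the named snapshot repository and
// prints the statistics returned in the response's Results field.
func cleanupRepo(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Snapshot.CleanupRepository("my_backup").
		Timeout("30s").
		Pretty(true).
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("cleanup results: %+v\n", res.Results)
	return nil
}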
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package cleanuprepository @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package cleanuprepository // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/cleanup_repository/SnapshotCleanupRepositoryResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/cleanup_repository/SnapshotCleanupRepositoryResponse.ts#L22-L27 type Response struct { // Results Statistics for cleanup operations. diff --git a/typedapi/snapshot/clone/clone.go b/typedapi/snapshot/clone/clone.go index 72d0576400..2fe8c1afe3 100644 --- a/typedapi/snapshot/clone/clone.go +++ b/typedapi/snapshot/clone/clone.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Clones indices from one snapshot into another snapshot in the same // repository. @@ -31,6 +31,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -371,6 +372,50 @@ func (r *Clone) Timeout(duration string) *Clone { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Clone) ErrorTrace(errortrace bool) *Clone { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Clone) FilterPath(filterpaths ...string) *Clone { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Clone) Human(human bool) *Clone { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Clone) Pretty(pretty bool) *Clone { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: indices func (r *Clone) Indices(indices string) *Clone { diff --git a/typedapi/snapshot/clone/request.go b/typedapi/snapshot/clone/request.go index d6da40143e..1481bc3eb4 100644 --- a/typedapi/snapshot/clone/request.go +++ b/typedapi/snapshot/clone/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clone @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package clone // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/clone/SnapshotCloneRequest.ts#L24-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/clone/SnapshotCloneRequest.ts#L24-L42 type Request struct { Indices string `json:"indices"` } @@ -35,6 +35,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/snapshot/clone/response.go b/typedapi/snapshot/clone/response.go index be5f8c69a1..e476d3a73d 100644 --- a/typedapi/snapshot/clone/response.go +++ b/typedapi/snapshot/clone/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clone // Response holds the response body struct for the package clone // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/clone/SnapshotCloneResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/clone/SnapshotCloneResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/snapshot/create/create.go b/typedapi/snapshot/create/create.go index a70c6cfad4..2bcd864611 100644 --- a/typedapi/snapshot/create/create.go +++ b/typedapi/snapshot/create/create.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates a snapshot in a repository. package create @@ -351,6 +351,50 @@ func (r *Create) WaitForCompletion(waitforcompletion bool) *Create { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Create) ErrorTrace(errortrace bool) *Create { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Create) FilterPath(filterpaths ...string) *Create { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *Create) Human(human bool) *Create { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Create) Pretty(pretty bool) *Create { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // FeatureStates Feature states to include in the snapshot. Each feature state includes one or // more system indices containing related data. You can view a list of eligible // features using the get features API. If `include_global_state` is `true`, all diff --git a/typedapi/snapshot/create/request.go b/typedapi/snapshot/create/request.go index ef482ff6cd..09375a4b15 100644 --- a/typedapi/snapshot/create/request.go +++ b/typedapi/snapshot/create/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package create @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/create/SnapshotCreateRequest.ts#L24-L81 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/create/SnapshotCreateRequest.ts#L24-L81 type Request struct { // FeatureStates Feature states to include in the snapshot. Each feature state includes one or @@ -69,6 +69,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -104,7 +105,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "ignore_unavailable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +119,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "include_global_state": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -153,7 +154,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "partial": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/snapshot/create/response.go b/typedapi/snapshot/create/response.go index fb3fc9c4fb..ad2f4649e0 100644 --- a/typedapi/snapshot/create/response.go +++ b/typedapi/snapshot/create/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package create @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package create // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/create/SnapshotCreateResponse.ts#L22-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/create/SnapshotCreateResponse.ts#L22-L35 type Response struct { // Accepted Equals `true` if the snapshot was accepted. 
Present when the request had diff --git a/typedapi/snapshot/createrepository/create_repository.go b/typedapi/snapshot/createrepository/create_repository.go index 3f738f3be1..ddc10788b1 100644 --- a/typedapi/snapshot/createrepository/create_repository.go +++ b/typedapi/snapshot/createrepository/create_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates a repository. package createrepository @@ -334,3 +334,47 @@ func (r *CreateRepository) Verify(verify bool) *CreateRepository { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *CreateRepository) ErrorTrace(errortrace bool) *CreateRepository { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *CreateRepository) FilterPath(filterpaths ...string) *CreateRepository { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *CreateRepository) Human(human bool) *CreateRepository { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *CreateRepository) Pretty(pretty bool) *CreateRepository { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/snapshot/createrepository/request.go b/typedapi/snapshot/createrepository/request.go index 7635dffc7e..601d0a3a0a 100644 --- a/typedapi/snapshot/createrepository/request.go +++ b/typedapi/snapshot/createrepository/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
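The request unmarshalers in this patch also replace interface{} with the any alias (identical types; any is simply the Go 1.18+ spelling) in their tolerant boolean decoding, where a field such as partial or include_global_state may arrive as a JSON boolean or as the string "true"/"false". An illustrative sketch of that coercion pattern, not the generated code itself:

package main

import (
	"fmt"
	"strconv"
)

// coerceBool mirrors the type switch used in the generated UnmarshalJSON
// methods: accept either a JSON boolean or a boolean-like string.
func coerceBool(tmp any) (bool, error) {
	switch v := tmp.(type) {
	case bool:
		return v, nil
	case string:
		return strconv.ParseBool(v)
	default:
		return false, fmt.Errorf("unexpected type %T", tmp)
	}
}

func main() {
	b, _ := coerceBool("true")
	fmt.Println(b) // true
}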
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package createrepository @@ -26,5 +26,5 @@ import ( // Request holds the request body struct for the package createrepository // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/create_repository/SnapshotCreateRepositoryRequest.ts#L25-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/create_repository/SnapshotCreateRepositoryRequest.ts#L25-L42 type Request = types.Repository diff --git a/typedapi/snapshot/createrepository/response.go b/typedapi/snapshot/createrepository/response.go index 8dd5c2bf14..0cd2bf7db0 100644 --- a/typedapi/snapshot/createrepository/response.go +++ b/typedapi/snapshot/createrepository/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package createrepository // Response holds the response body struct for the package createrepository // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/create_repository/SnapshotCreateRepositoryResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/create_repository/SnapshotCreateRepositoryResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/snapshot/delete/delete.go b/typedapi/snapshot/delete/delete.go index 11e2736890..7d4a1d5464 100644 --- a/typedapi/snapshot/delete/delete.go +++ b/typedapi/snapshot/delete/delete.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes one or more snapshots. package delete @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -271,7 +271,7 @@ func (r Delete) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -324,3 +324,47 @@ func (r *Delete) MasterTimeout(duration string) *Delete { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Delete) ErrorTrace(errortrace bool) *Delete { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *Delete) FilterPath(filterpaths ...string) *Delete { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Delete) Human(human bool) *Delete { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Delete) Pretty(pretty bool) *Delete { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/snapshot/delete/response.go b/typedapi/snapshot/delete/response.go index cbc40539d5..97e581cacf 100644 --- a/typedapi/snapshot/delete/response.go +++ b/typedapi/snapshot/delete/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package delete // Response holds the response body struct for the package delete // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/delete/SnapshotDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/delete/SnapshotDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/snapshot/deleterepository/delete_repository.go b/typedapi/snapshot/deleterepository/delete_repository.go index 839277e244..8e125001e2 100644 --- a/typedapi/snapshot/deleterepository/delete_repository.go +++ b/typedapi/snapshot/deleterepository/delete_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes a repository. package deleterepository @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -260,7 +260,7 @@ func (r DeleteRepository) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -313,3 +313,47 @@ func (r *DeleteRepository) Timeout(duration string) *DeleteRepository { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *DeleteRepository) ErrorTrace(errortrace bool) *DeleteRepository { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteRepository) FilterPath(filterpaths ...string) *DeleteRepository { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteRepository) Human(human bool) *DeleteRepository { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteRepository) Pretty(pretty bool) *DeleteRepository { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/snapshot/deleterepository/response.go b/typedapi/snapshot/deleterepository/response.go index 3d1b0e979f..86dc03de42 100644 --- a/typedapi/snapshot/deleterepository/response.go +++ b/typedapi/snapshot/deleterepository/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleterepository // Response holds the response body struct for the package deleterepository // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/delete_repository/SnapshotDeleteRepositoryResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/delete_repository/SnapshotDeleteRepositoryResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/snapshot/get/get.go b/typedapi/snapshot/get/get.go index 4e127cc228..8109378bda 100644 --- a/typedapi/snapshot/get/get.go +++ b/typedapi/snapshot/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns information about a snapshot. 
package get @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -274,7 +273,7 @@ func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -449,3 +448,47 @@ func (r *Get) SlmPolicyFilter(name string) *Get { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Get) ErrorTrace(errortrace bool) *Get { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Get) FilterPath(filterpaths ...string) *Get { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Get) Human(human bool) *Get { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Get) Pretty(pretty bool) *Get { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/snapshot/get/response.go b/typedapi/snapshot/get/response.go index 3d33828e88..269782f100 100644 --- a/typedapi/snapshot/get/response.go +++ b/typedapi/snapshot/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package get @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/get/SnapshotGetResponse.ts#L25-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/get/SnapshotGetResponse.ts#L25-L42 type Response struct { // Remaining The number of remaining snapshots that were not returned due to size limits diff --git a/typedapi/snapshot/getrepository/get_repository.go b/typedapi/snapshot/getrepository/get_repository.go index c71216ccce..cdf3e9c731 100644 --- a/typedapi/snapshot/getrepository/get_repository.go +++ b/typedapi/snapshot/getrepository/get_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns information about a repository. 
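For read endpoints with large responses, such as the snapshot Get builder above, the new FilterPath setter is the most useful of the added options. A speculative sketch follows; the Get(repository, snapshot) signature, the "*" pattern, and the res.Snapshots field are assumptions about the generated API, not shown in this hunk:

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8"
)

// listSnapshots fetches snapshot metadata and asks Elasticsearch to trim
// the response to the snapshot name and state via filter_path.
func listSnapshots(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Snapshot.Get("my_backup", "*").
		FilterPath("snapshots.snapshot", "snapshots.state").
		Human(true).
		Do(ctx)
	if err != nil {
		return err
	}
	fmt.Printf("returned %d snapshots\n", len(res.Snapshots))
	return nil
}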
package getrepository @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -264,7 +263,7 @@ func (r GetRepository) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -317,3 +316,47 @@ func (r *GetRepository) MasterTimeout(duration string) *GetRepository { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetRepository) ErrorTrace(errortrace bool) *GetRepository { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetRepository) FilterPath(filterpaths ...string) *GetRepository { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetRepository) Human(human bool) *GetRepository { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetRepository) Pretty(pretty bool) *GetRepository { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/snapshot/getrepository/response.go b/typedapi/snapshot/getrepository/response.go index 563614bea2..45a3df3fd4 100644 --- a/typedapi/snapshot/getrepository/response.go +++ b/typedapi/snapshot/getrepository/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getrepository @@ -32,7 +32,7 @@ import ( // Response holds the response body struct for the package getrepository // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/get_repository/SnapshotGetRepositoryResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/get_repository/SnapshotGetRepositoryResponse.ts#L23-L25 type Response map[string]types.Repository @@ -45,7 +45,7 @@ func NewResponse() Response { func (r Response) UnmarshalJSON(data []byte) error { dec := json.NewDecoder(bytes.NewReader(data)) - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) dec.Decode(&o) dec = json.NewDecoder(bytes.NewReader(data)) for { @@ -59,7 +59,7 @@ func (r Response) UnmarshalJSON(data []byte) error { key := fmt.Sprintf("%s", t) if target, ok := o[key]; ok { - if t, ok := target.(map[string]interface{})["type"]; ok { + if t, ok := target.(map[string]any)["type"]; ok { switch t { diff --git a/typedapi/snapshot/restore/request.go b/typedapi/snapshot/restore/request.go index 7079ef56ee..421f393365 100644 --- a/typedapi/snapshot/restore/request.go +++ b/typedapi/snapshot/restore/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package restore @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package restore // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/restore/SnapshotRestoreRequest.ts#L25-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/restore/SnapshotRestoreRequest.ts#L25-L51 type Request struct { FeatureStates []string `json:"feature_states,omitempty"` IgnoreIndexSettings []string `json:"ignore_index_settings,omitempty"` @@ -50,6 +50,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -90,7 +91,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "ignore_unavailable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +105,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "include_aliases": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +119,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "include_global_state": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -153,7 +154,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "partial": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/snapshot/restore/response.go b/typedapi/snapshot/restore/response.go index 08a63f6dc3..21b3f032fe 100644 --- a/typedapi/snapshot/restore/response.go +++ b/typedapi/snapshot/restore/response.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package restore @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package restore // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/restore/SnapshotRestoreResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/restore/SnapshotRestoreResponse.ts#L23-L25 type Response struct { Snapshot types.SnapshotRestore `json:"snapshot"` } diff --git a/typedapi/snapshot/restore/restore.go b/typedapi/snapshot/restore/restore.go index 962d014c5a..1eb9a07680 100644 --- a/typedapi/snapshot/restore/restore.go +++ b/typedapi/snapshot/restore/restore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Restores a snapshot. package restore @@ -351,6 +351,50 @@ func (r *Restore) WaitForCompletion(waitforcompletion bool) *Restore { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Restore) ErrorTrace(errortrace bool) *Restore { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Restore) FilterPath(filterpaths ...string) *Restore { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Restore) Human(human bool) *Restore { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Restore) Pretty(pretty bool) *Restore { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: feature_states func (r *Restore) FeatureStates(featurestates ...string) *Restore { r.req.FeatureStates = featurestates diff --git a/typedapi/snapshot/status/response.go b/typedapi/snapshot/status/response.go index 33830350d4..76727ba5c6 100644 --- a/typedapi/snapshot/status/response.go +++ b/typedapi/snapshot/status/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package status @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package status // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/status/SnapshotStatusResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/status/SnapshotStatusResponse.ts#L22-L24 type Response struct { Snapshots []types.Status `json:"snapshots"` } diff --git a/typedapi/snapshot/status/status.go b/typedapi/snapshot/status/status.go index bbb433c1c6..47e2ef4d3d 100644 --- a/typedapi/snapshot/status/status.go +++ b/typedapi/snapshot/status/status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns information about the status of a snapshot. package status @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -290,7 +289,7 @@ func (r Status) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -352,3 +351,47 @@ func (r *Status) MasterTimeout(duration string) *Status { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Status) ErrorTrace(errortrace bool) *Status { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Status) FilterPath(filterpaths ...string) *Status { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Status) Human(human bool) *Status { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Status) Pretty(pretty bool) *Status { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/snapshot/verifyrepository/response.go b/typedapi/snapshot/verifyrepository/response.go index c4e9fd431d..a513fbd155 100644 --- a/typedapi/snapshot/verifyrepository/response.go +++ b/typedapi/snapshot/verifyrepository/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package verifyrepository @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package verifyrepository // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/verify_repository/SnapshotVerifyRepositoryResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/verify_repository/SnapshotVerifyRepositoryResponse.ts#L23-L25 type Response struct { Nodes map[string]types.CompactNodeInfo `json:"nodes"` } diff --git a/typedapi/snapshot/verifyrepository/verify_repository.go b/typedapi/snapshot/verifyrepository/verify_repository.go index 0fd086b2fa..8b43ad67d5 100644 --- a/typedapi/snapshot/verifyrepository/verify_repository.go +++ b/typedapi/snapshot/verifyrepository/verify_repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Verifies a repository. package verifyrepository @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -262,7 +262,7 @@ func (r VerifyRepository) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -314,3 +314,47 @@ func (r *VerifyRepository) Timeout(duration string) *VerifyRepository { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *VerifyRepository) ErrorTrace(errortrace bool) *VerifyRepository { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *VerifyRepository) FilterPath(filterpaths ...string) *VerifyRepository { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *VerifyRepository) Human(human bool) *VerifyRepository { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *VerifyRepository) Pretty(pretty bool) *VerifyRepository { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/sql/clearcursor/clear_cursor.go b/typedapi/sql/clearcursor/clear_cursor.go index 24ff388bab..19174d60a8 100644 --- a/typedapi/sql/clearcursor/clear_cursor.go +++ b/typedapi/sql/clearcursor/clear_cursor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Clears the SQL cursor package clearcursor @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -291,6 +292,50 @@ func (r *ClearCursor) Header(key, value string) *ClearCursor { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ClearCursor) ErrorTrace(errortrace bool) *ClearCursor { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ClearCursor) FilterPath(filterpaths ...string) *ClearCursor { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ClearCursor) Human(human bool) *ClearCursor { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ClearCursor) Pretty(pretty bool) *ClearCursor { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Cursor Cursor to clear. // API name: cursor func (r *ClearCursor) Cursor(cursor string) *ClearCursor { diff --git a/typedapi/sql/clearcursor/request.go b/typedapi/sql/clearcursor/request.go index 492163282b..2c916f65be 100644 --- a/typedapi/sql/clearcursor/request.go +++ b/typedapi/sql/clearcursor/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clearcursor @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package clearcursor // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/sql/clear_cursor/ClearSqlCursorRequest.ts#L22-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/sql/clear_cursor/ClearSqlCursorRequest.ts#L22-L34 type Request struct { // Cursor Cursor to clear. 
@@ -37,6 +37,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/sql/clearcursor/response.go b/typedapi/sql/clearcursor/response.go index 352f96e52b..decb39584d 100644 --- a/typedapi/sql/clearcursor/response.go +++ b/typedapi/sql/clearcursor/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package clearcursor // Response holds the response body struct for the package clearcursor // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/sql/clear_cursor/ClearSqlCursorResponse.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/sql/clear_cursor/ClearSqlCursorResponse.ts#L20-L22 type Response struct { Succeeded bool `json:"succeeded"` } diff --git a/typedapi/sql/deleteasync/delete_async.go b/typedapi/sql/deleteasync/delete_async.go index ef9e709fe6..99a5af8cb8 100644 --- a/typedapi/sql/deleteasync/delete_async.go +++ b/typedapi/sql/deleteasync/delete_async.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes an async SQL search or a stored synchronous SQL search. If the search // is still running, the API cancels it. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -266,7 +266,7 @@ func (r DeleteAsync) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -302,3 +302,47 @@ func (r *DeleteAsync) _id(id string) *DeleteAsync { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteAsync) ErrorTrace(errortrace bool) *DeleteAsync { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteAsync) FilterPath(filterpaths ...string) *DeleteAsync { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteAsync) Human(human bool) *DeleteAsync { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". 
Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteAsync) Pretty(pretty bool) *DeleteAsync { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/sql/deleteasync/response.go b/typedapi/sql/deleteasync/response.go index e76706e5f1..68ff95279b 100644 --- a/typedapi/sql/deleteasync/response.go +++ b/typedapi/sql/deleteasync/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deleteasync // Response holds the response body struct for the package deleteasync // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/sql/delete_async/SqlDeleteAsyncResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/sql/delete_async/SqlDeleteAsyncResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/sql/getasync/get_async.go b/typedapi/sql/getasync/get_async.go index 0392f09c69..b30cb9ed05 100644 --- a/typedapi/sql/getasync/get_async.go +++ b/typedapi/sql/getasync/get_async.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns the current status and available results for an async SQL search or // stored synchronous SQL search @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r GetAsync) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -337,3 +337,47 @@ func (r *GetAsync) WaitForCompletionTimeout(duration string) *GetAsync { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetAsync) ErrorTrace(errortrace bool) *GetAsync { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetAsync) FilterPath(filterpaths ...string) *GetAsync { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *GetAsync) Human(human bool) *GetAsync { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetAsync) Pretty(pretty bool) *GetAsync { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/sql/getasync/response.go b/typedapi/sql/getasync/response.go index e87b2fa27b..3b702494b9 100644 --- a/typedapi/sql/getasync/response.go +++ b/typedapi/sql/getasync/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getasync @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package getasync // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/sql/get_async/SqlGetAsyncResponse.ts#L23-L60 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/sql/get_async/SqlGetAsyncResponse.ts#L23-L60 type Response struct { // Columns Column headings for the search results. Each object is a column. diff --git a/typedapi/sql/getasyncstatus/get_async_status.go b/typedapi/sql/getasyncstatus/get_async_status.go index 01f20de35f..66ee2bf48e 100644 --- a/typedapi/sql/getasyncstatus/get_async_status.go +++ b/typedapi/sql/getasyncstatus/get_async_status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns the current status of an async SQL search or a stored synchronous SQL // search @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -266,7 +266,7 @@ func (r GetAsyncStatus) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -302,3 +302,47 @@ func (r *GetAsyncStatus) _id(id string) *GetAsyncStatus { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetAsyncStatus) ErrorTrace(errortrace bool) *GetAsyncStatus { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetAsyncStatus) FilterPath(filterpaths ...string) *GetAsyncStatus { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. 
When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetAsyncStatus) Human(human bool) *GetAsyncStatus { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetAsyncStatus) Pretty(pretty bool) *GetAsyncStatus { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/sql/getasyncstatus/response.go b/typedapi/sql/getasyncstatus/response.go index 3c1612066b..bee310b143 100644 --- a/typedapi/sql/getasyncstatus/response.go +++ b/typedapi/sql/getasyncstatus/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getasyncstatus // Response holds the response body struct for the package getasyncstatus // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/sql/get_async_status/SqlGetAsyncStatusResponse.ts#L23-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/sql/get_async_status/SqlGetAsyncStatusResponse.ts#L23-L55 type Response struct { // CompletionStatus HTTP status code for the search. The API only returns this property for diff --git a/typedapi/sql/query/query.go b/typedapi/sql/query/query.go index 3e6acef0d2..42fecdbc4c 100644 --- a/typedapi/sql/query/query.go +++ b/typedapi/sql/query/query.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Executes a SQL request package query @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -297,6 +298,50 @@ func (r *Query) Format(format string) *Query { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Query) ErrorTrace(errortrace bool) *Query { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Query) FilterPath(filterpaths ...string) *Query { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *Query) Human(human bool) *Query { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Query) Pretty(pretty bool) *Query { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Catalog Default catalog (cluster) for queries. If unspecified, the queries execute on // the data in the local cluster only. // API name: catalog diff --git a/typedapi/sql/query/request.go b/typedapi/sql/query/request.go index c73e03bd7d..7d8e8951c6 100644 --- a/typedapi/sql/query/request.go +++ b/typedapi/sql/query/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package query @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package query // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/sql/query/QuerySqlRequest.ts#L28-L122 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/sql/query/QuerySqlRequest.ts#L28-L122 type Request struct { // Catalog Default catalog (cluster) for queries. If unspecified, the queries execute on @@ -87,6 +87,7 @@ func NewRequest() *Request { r := &Request{ Params: make(map[string]json.RawMessage, 0), } + return r } @@ -129,7 +130,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { s.Catalog = &o case "columnar": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -156,7 +157,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "fetch_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -171,7 +172,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "field_multi_value_leniency": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -190,7 +191,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "index_using_frozen": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -209,7 +210,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { } case "keep_on_completion": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/sql/query/response.go b/typedapi/sql/query/response.go index ef4d983388..6fd93306e5 100644 --- a/typedapi/sql/query/response.go +++ b/typedapi/sql/query/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package query @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package query // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/sql/query/QuerySqlResponse.ts#L23-L60 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/sql/query/QuerySqlResponse.ts#L23-L60 type Response struct { // Columns Column headings for the search results. Each object is a column. diff --git a/typedapi/sql/translate/request.go b/typedapi/sql/translate/request.go index 3a6414fda8..6be597fe82 100644 --- a/typedapi/sql/translate/request.go +++ b/typedapi/sql/translate/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package translate @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package translate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/sql/translate/TranslateSqlRequest.ts#L25-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/sql/translate/TranslateSqlRequest.ts#L25-L54 type Request struct { // FetchSize The maximum number of rows (or entries) to return in one response. @@ -49,6 +49,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -80,7 +81,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "fetch_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/sql/translate/response.go b/typedapi/sql/translate/response.go index 424ea310e5..6cb60717fe 100644 --- a/typedapi/sql/translate/response.go +++ b/typedapi/sql/translate/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package translate @@ -33,7 +33,7 @@ import ( // Response holds the response body struct for the package translate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/sql/translate/TranslateSqlResponse.ts#L28-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/sql/translate/TranslateSqlResponse.ts#L28-L38 type Response struct { Aggregations map[string]types.Aggregations `json:"aggregations,omitempty"` Fields []types.FieldAndFormat `json:"fields,omitempty"` @@ -84,7 +84,7 @@ func (s *Response) UnmarshalJSON(data []byte) error { } case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/sql/translate/translate.go b/typedapi/sql/translate/translate.go index 7c96665545..125d08b5ed 100644 --- a/typedapi/sql/translate/translate.go +++ b/typedapi/sql/translate/translate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Translates SQL into Elasticsearch queries package translate @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -291,6 +292,50 @@ func (r *Translate) Header(key, value string) *Translate { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Translate) ErrorTrace(errortrace bool) *Translate { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Translate) FilterPath(filterpaths ...string) *Translate { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Translate) Human(human bool) *Translate { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Translate) Pretty(pretty bool) *Translate { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // FetchSize The maximum number of rows (or entries) to return in one response. 
// API name: fetch_size func (r *Translate) FetchSize(fetchsize int) *Translate { diff --git a/typedapi/ssl/certificates/certificates.go b/typedapi/ssl/certificates/certificates.go index 167d1e5364..57a2c9f6cb 100644 --- a/typedapi/ssl/certificates/certificates.go +++ b/typedapi/ssl/certificates/certificates.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves information about the X.509 certificates used to encrypt // communications in the cluster. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -250,7 +250,7 @@ func (r Certificates) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -277,3 +277,47 @@ func (r *Certificates) Header(key, value string) *Certificates { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Certificates) ErrorTrace(errortrace bool) *Certificates { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Certificates) FilterPath(filterpaths ...string) *Certificates { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Certificates) Human(human bool) *Certificates { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Certificates) Pretty(pretty bool) *Certificates { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/ssl/certificates/response.go b/typedapi/ssl/certificates/response.go index fc45aed6a8..26287b8185 100644 --- a/typedapi/ssl/certificates/response.go +++ b/typedapi/ssl/certificates/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package certificates @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package certificates // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ssl/certificates/GetCertificatesResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ssl/certificates/GetCertificatesResponse.ts#L22-L24 type Response []types.CertificateInformation diff --git a/typedapi/synonyms/deletesynonym/delete_synonym.go b/typedapi/synonyms/deletesynonym/delete_synonym.go index 87b2fd0544..7271b5e57f 100644 --- a/typedapi/synonyms/deletesynonym/delete_synonym.go +++ b/typedapi/synonyms/deletesynonym/delete_synonym.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes a synonym set package deletesynonym @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -260,7 +260,7 @@ func (r DeleteSynonym) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -296,3 +296,47 @@ func (r *DeleteSynonym) _id(id string) *DeleteSynonym { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteSynonym) ErrorTrace(errortrace bool) *DeleteSynonym { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteSynonym) FilterPath(filterpaths ...string) *DeleteSynonym { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteSynonym) Human(human bool) *DeleteSynonym { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *DeleteSynonym) Pretty(pretty bool) *DeleteSynonym { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/synonyms/deletesynonym/response.go b/typedapi/synonyms/deletesynonym/response.go index f02316e5c4..da8ba12b2e 100644 --- a/typedapi/synonyms/deletesynonym/response.go +++ b/typedapi/synonyms/deletesynonym/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletesynonym // Response holds the response body struct for the package deletesynonym // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/delete_synonym/SynonymsDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/delete_synonym/SynonymsDeleteResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/synonyms/deletesynonymrule/delete_synonym_rule.go b/typedapi/synonyms/deletesynonymrule/delete_synonym_rule.go index 13448dfeb8..dd7dbc856d 100644 --- a/typedapi/synonyms/deletesynonymrule/delete_synonym_rule.go +++ b/typedapi/synonyms/deletesynonymrule/delete_synonym_rule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deletes a synonym rule in a synonym set package deletesynonymrule @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -277,7 +277,7 @@ func (r DeleteSynonymRule) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -322,3 +322,47 @@ func (r *DeleteSynonymRule) _ruleid(ruleid string) *DeleteSynonymRule { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteSynonymRule) ErrorTrace(errortrace bool) *DeleteSynonymRule { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteSynonymRule) FilterPath(filterpaths ...string) *DeleteSynonymRule { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *DeleteSynonymRule) Human(human bool) *DeleteSynonymRule { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteSynonymRule) Pretty(pretty bool) *DeleteSynonymRule { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/synonyms/deletesynonymrule/response.go b/typedapi/synonyms/deletesynonymrule/response.go index 88fd154ee2..c360876604 100644 --- a/typedapi/synonyms/deletesynonymrule/response.go +++ b/typedapi/synonyms/deletesynonymrule/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletesynonymrule @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package deletesynonymrule // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/delete_synonym_rule/SynonymRuleDeleteResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/delete_synonym_rule/SynonymRuleDeleteResponse.ts#L22-L24 type Response struct { // ReloadAnalyzersDetails Updating synonyms in a synonym set reloads the associated analyzers. diff --git a/typedapi/synonyms/getsynonym/get_synonym.go b/typedapi/synonyms/getsynonym/get_synonym.go index b0a7ea3032..a0beb13941 100644 --- a/typedapi/synonyms/getsynonym/get_synonym.go +++ b/typedapi/synonyms/getsynonym/get_synonym.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves a synonym set package getsynonym @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -261,7 +260,7 @@ func (r GetSynonym) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -313,3 +312,47 @@ func (r *GetSynonym) Size(size int) *GetSynonym { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetSynonym) ErrorTrace(errortrace bool) *GetSynonym { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetSynonym) FilterPath(filterpaths ...string) *GetSynonym { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. 
When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetSynonym) Human(human bool) *GetSynonym { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetSynonym) Pretty(pretty bool) *GetSynonym { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/synonyms/getsynonym/response.go b/typedapi/synonyms/getsynonym/response.go index 9973395757..489e24d788 100644 --- a/typedapi/synonyms/getsynonym/response.go +++ b/typedapi/synonyms/getsynonym/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getsynonym @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getsynonym // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/get_synonym/SynonymsGetResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/get_synonym/SynonymsGetResponse.ts#L23-L28 type Response struct { Count int `json:"count"` SynonymsSet []types.SynonymRuleRead `json:"synonyms_set"` diff --git a/typedapi/synonyms/getsynonymrule/get_synonym_rule.go b/typedapi/synonyms/getsynonymrule/get_synonym_rule.go index 9f014fa7de..da7e4e0697 100644 --- a/typedapi/synonyms/getsynonymrule/get_synonym_rule.go +++ b/typedapi/synonyms/getsynonymrule/get_synonym_rule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves a synonym rule from a synonym set package getsynonymrule @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -277,7 +277,7 @@ func (r GetSynonymRule) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -322,3 +322,47 @@ func (r *GetSynonymRule) _ruleid(ruleid string) *GetSynonymRule { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetSynonymRule) ErrorTrace(errortrace bool) *GetSynonymRule { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *GetSynonymRule) FilterPath(filterpaths ...string) *GetSynonymRule { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetSynonymRule) Human(human bool) *GetSynonymRule { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetSynonymRule) Pretty(pretty bool) *GetSynonymRule { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/synonyms/getsynonymrule/response.go b/typedapi/synonyms/getsynonymrule/response.go index d16b74d33d..7745a5ddbc 100644 --- a/typedapi/synonyms/getsynonymrule/response.go +++ b/typedapi/synonyms/getsynonymrule/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getsynonymrule // Response holds the response body struct for the package getsynonymrule // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/get_synonym_rule/SynonymRuleGetResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/get_synonym_rule/SynonymRuleGetResponse.ts#L22-L24 type Response struct { // Id Synonym Rule identifier diff --git a/typedapi/synonyms/getsynonymssets/get_synonyms_sets.go b/typedapi/synonyms/getsynonymssets/get_synonyms_sets.go index b95ccaac20..9a52bdf595 100644 --- a/typedapi/synonyms/getsynonymssets/get_synonyms_sets.go +++ b/typedapi/synonyms/getsynonymssets/get_synonyms_sets.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves a summary of all defined synonym sets package getsynonymssets @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -247,7 +246,7 @@ func (r GetSynonymsSets) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -290,3 +289,47 @@ func (r *GetSynonymsSets) Size(size int) *GetSynonymsSets { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *GetSynonymsSets) ErrorTrace(errortrace bool) *GetSynonymsSets { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetSynonymsSets) FilterPath(filterpaths ...string) *GetSynonymsSets { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetSynonymsSets) Human(human bool) *GetSynonymsSets { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetSynonymsSets) Pretty(pretty bool) *GetSynonymsSets { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/synonyms/getsynonymssets/response.go b/typedapi/synonyms/getsynonymssets/response.go index 28913b3353..3d685958fe 100644 --- a/typedapi/synonyms/getsynonymssets/response.go +++ b/typedapi/synonyms/getsynonymssets/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getsynonymssets @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getsynonymssets // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/get_synonyms_sets/SynonymsSetsGetResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/get_synonyms_sets/SynonymsSetsGetResponse.ts#L23-L28 type Response struct { Count int `json:"count"` Results []types.SynonymsSetItem `json:"results"` diff --git a/typedapi/synonyms/putsynonym/put_synonym.go b/typedapi/synonyms/putsynonym/put_synonym.go index ccd2634c32..d4d0e11b36 100644 --- a/typedapi/synonyms/putsynonym/put_synonym.go +++ b/typedapi/synonyms/putsynonym/put_synonym.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Creates or updates a synonyms set +// Creates or updates a synonym set. package putsynonym import ( @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,7 +81,7 @@ func NewPutSynonymFunc(tp elastictransport.Interface) NewPutSynonym { } } -// Creates or updates a synonyms set +// Creates or updates a synonym set. 
// // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-synonyms-set.html func New(tp elastictransport.Interface) *PutSynonym { @@ -312,6 +313,50 @@ func (r *PutSynonym) _id(id string) *PutSynonym { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutSynonym) ErrorTrace(errortrace bool) *PutSynonym { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutSynonym) FilterPath(filterpaths ...string) *PutSynonym { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutSynonym) Human(human bool) *PutSynonym { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutSynonym) Pretty(pretty bool) *PutSynonym { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // SynonymsSet The synonym set information to update // API name: synonyms_set func (r *PutSynonym) SynonymsSet(synonymssets ...types.SynonymRule) *PutSynonym { diff --git a/typedapi/synonyms/putsynonym/request.go b/typedapi/synonyms/putsynonym/request.go index d2d2e0ef78..0392c7702e 100644 --- a/typedapi/synonyms/putsynonym/request.go +++ b/typedapi/synonyms/putsynonym/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putsynonym @@ -29,7 +29,7 @@ import ( // Request holds the request body struct for the package putsynonym // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/put_synonym/SynonymsPutRequest.ts#L23-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/put_synonym/SynonymsPutRequest.ts#L23-L42 type Request struct { // SynonymsSet The synonym set information to update @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/synonyms/putsynonym/response.go b/typedapi/synonyms/putsynonym/response.go index 7e1ba6b336..272549dcf4 100644 --- a/typedapi/synonyms/putsynonym/response.go +++ b/typedapi/synonyms/putsynonym/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putsynonym @@ -27,10 +27,10 @@ import ( // Response holds the response body struct for the package putsynonym // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/put_synonym/SynonymsPutResponse.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/put_synonym/SynonymsPutResponse.ts#L23-L28 type Response struct { - ReloadAnalyzersDetails types.ReloadDetails `json:"reload_analyzers_details"` - Result result.Result `json:"result"` + ReloadAnalyzersDetails types.ReloadResult `json:"reload_analyzers_details"` + Result result.Result `json:"result"` } // NewResponse returns a Response diff --git a/typedapi/synonyms/putsynonymrule/put_synonym_rule.go b/typedapi/synonyms/putsynonymrule/put_synonym_rule.go index 16708b8c1c..a494764d1c 100644 --- a/typedapi/synonyms/putsynonymrule/put_synonym_rule.go +++ b/typedapi/synonyms/putsynonymrule/put_synonym_rule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates or updates a synonym rule in a synonym set package putsynonymrule @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -332,6 +333,50 @@ func (r *PutSynonymRule) _ruleid(ruleid string) *PutSynonymRule { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutSynonymRule) ErrorTrace(errortrace bool) *PutSynonymRule { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutSynonymRule) FilterPath(filterpaths ...string) *PutSynonymRule { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutSynonymRule) Human(human bool) *PutSynonymRule { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *PutSynonymRule) Pretty(pretty bool) *PutSynonymRule { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: synonyms func (r *PutSynonymRule) Synonyms(synonymstring string) *PutSynonymRule { r.req.Synonyms = synonymstring diff --git a/typedapi/synonyms/putsynonymrule/request.go b/typedapi/synonyms/putsynonymrule/request.go index de88e1d185..0cb6928299 100644 --- a/typedapi/synonyms/putsynonymrule/request.go +++ b/typedapi/synonyms/putsynonymrule/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putsynonymrule @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package putsynonymrule // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/put_synonym_rule/SynonymRulePutRequest.ts#L23-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/put_synonym_rule/SynonymRulePutRequest.ts#L23-L47 type Request struct { Synonyms string `json:"synonyms"` } @@ -35,6 +35,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/synonyms/putsynonymrule/response.go b/typedapi/synonyms/putsynonymrule/response.go index 9d2d701196..1c0fa3eef5 100644 --- a/typedapi/synonyms/putsynonymrule/response.go +++ b/typedapi/synonyms/putsynonymrule/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putsynonymrule @@ -27,7 +27,7 @@ import ( // Response holds the response body struct for the package putsynonymrule // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/put_synonym_rule/SynonymRulePutResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/put_synonym_rule/SynonymRulePutResponse.ts#L22-L24 type Response struct { // ReloadAnalyzersDetails Updating synonyms in a synonym set reloads the associated analyzers. diff --git a/typedapi/tasks/cancel/cancel.go b/typedapi/tasks/cancel/cancel.go index dfd7bbe3bc..1396bb15da 100644 --- a/typedapi/tasks/cancel/cancel.go +++ b/typedapi/tasks/cancel/cancel.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Cancels a task, if it can be cancelled through an API. 
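
The synonyms request builders above now expose the shared `error_trace`, `filter_path`, `human`, and `pretty` query-string options directly on the generated types. Below is a minimal sketch of how they chain with the existing `Size` option; it assumes the `getsynonymssets` package keeps the usual generated shape (a `New(tp elastictransport.Interface)` constructor and a `Do` method returning `*getsynonymssets.Response`) and an anonymous local cluster reachable over plain HTTP.

```go
// Minimal sketch: list synonym sets using the new query-string helpers.
// Assumptions: getsynonymssets.New(tp) and Do(ctx) (*Response, error) follow
// the generated pattern shown for the other endpoints; the cluster at
// http://localhost:9200 needs no authentication.
package main

import (
	"context"
	"fmt"
	"log"
	"net/url"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/synonyms/getsynonymssets"
)

func main() {
	u, _ := url.Parse("http://localhost:9200")
	tp, err := elastictransport.New(elastictransport.Config{URLs: []*url.URL{u}})
	if err != nil {
		log.Fatalf("cannot create transport: %s", err)
	}

	res, err := getsynonymssets.New(tp).
		Size(10).                       // existing pagination option
		FilterPath("count", "results"). // new: trim the response body server side
		Pretty(true).                   // new: pretty-print the returned JSON
		Do(context.Background())
	if err != nil {
		log.Fatalf("request failed: %s", err)
	}
	fmt.Printf("found %d synonym sets\n", res.Count)
}
```

Since `filter_path` is applied on the server, any fields it drops simply remain at their zero values after the client decodes the response.
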
package cancel @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -268,7 +267,7 @@ func (r Cancel) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -346,3 +345,47 @@ func (r *Cancel) WaitForCompletion(waitforcompletion bool) *Cancel { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Cancel) ErrorTrace(errortrace bool) *Cancel { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Cancel) FilterPath(filterpaths ...string) *Cancel { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Cancel) Human(human bool) *Cancel { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Cancel) Pretty(pretty bool) *Cancel { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/tasks/cancel/response.go b/typedapi/tasks/cancel/response.go index 8e0248e2f4..a80a9b6964 100644 --- a/typedapi/tasks/cancel/response.go +++ b/typedapi/tasks/cancel/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package cancel @@ -32,7 +32,7 @@ import ( // Response holds the response body struct for the package cancel // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/tasks/cancel/CancelTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/tasks/cancel/CancelTasksResponse.ts#L22-L24 type Response struct { NodeFailures []types.ErrorCause `json:"node_failures,omitempty"` // Nodes Task information grouped by node, if `group_by` was set to `node` (the diff --git a/typedapi/tasks/get/get.go b/typedapi/tasks/get/get.go index cba6b90b55..ce18fc6968 100644 --- a/typedapi/tasks/get/get.go +++ b/typedapi/tasks/get/get.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Returns information about a task. 
package get @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -261,7 +260,7 @@ func (r Get) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -315,3 +314,47 @@ func (r *Get) WaitForCompletion(waitforcompletion bool) *Get { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Get) ErrorTrace(errortrace bool) *Get { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Get) FilterPath(filterpaths ...string) *Get { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Get) Human(human bool) *Get { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Get) Pretty(pretty bool) *Get { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/tasks/get/response.go b/typedapi/tasks/get/response.go index 38d9cff6f0..1346f1adc8 100644 --- a/typedapi/tasks/get/response.go +++ b/typedapi/tasks/get/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package get @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package get // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/tasks/get/GetTaskResponse.ts#L24-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/tasks/get/GetTaskResponse.ts#L24-L31 type Response struct { Completed bool `json:"completed"` Error *types.ErrorCause `json:"error,omitempty"` diff --git a/typedapi/tasks/list/list.go b/typedapi/tasks/list/list.go index f041192369..e1be909601 100644 --- a/typedapi/tasks/list/list.go +++ b/typedapi/tasks/list/list.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Returns a list of tasks. +// The task management API returns information about tasks currently executing +// on one or more nodes in the cluster. 
package list import ( @@ -27,7 +28,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -70,7 +70,8 @@ func NewListFunc(tp elastictransport.Interface) NewList { } } -// Returns a list of tasks. +// The task management API returns information about tasks currently executing +// on one or more nodes in the cluster. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html func New(tp elastictransport.Interface) *List { @@ -248,7 +249,7 @@ func (r List) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -351,3 +352,47 @@ func (r *List) WaitForCompletion(waitforcompletion bool) *List { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *List) ErrorTrace(errortrace bool) *List { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *List) FilterPath(filterpaths ...string) *List { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *List) Human(human bool) *List { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *List) Pretty(pretty bool) *List { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/tasks/list/response.go b/typedapi/tasks/list/response.go index 9e8ed6027d..1a66bafd91 100644 --- a/typedapi/tasks/list/response.go +++ b/typedapi/tasks/list/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
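
The rewritten task-management description above carries over to the generated builder: listing running tasks stays a single chained call, and the new `human` option renders durations in the reply in readable form. A short sketch under the assumption that `Do` returns the `*list.Response` defined below, with the transport obtained as in the earlier example:

```go
package example

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/tasks/list"
)

// runningTasks polls tasks currently executing on the cluster without blocking.
// list.New(tp) is taken from the generated constructor above; the *list.Response
// return type of Do is assumed from the usual generated pattern.
func runningTasks(ctx context.Context, tp elastictransport.Interface) (*list.Response, error) {
	return list.New(tp).
		WaitForCompletion(false). // existing option: return immediately
		Human(true).              // new: human-readable durations in the output
		Do(ctx)
}
```
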
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package list @@ -32,7 +32,7 @@ import ( // Response holds the response body struct for the package list // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/tasks/list/ListTasksResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/tasks/list/ListTasksResponse.ts#L22-L24 type Response struct { NodeFailures []types.ErrorCause `json:"node_failures,omitempty"` // Nodes Task information grouped by node, if `group_by` was set to `node` (the diff --git a/typedapi/textstructure/findfieldstructure/find_field_structure.go b/typedapi/textstructure/findfieldstructure/find_field_structure.go new file mode 100644 index 0000000000..fb4c61e7cd --- /dev/null +++ b/typedapi/textstructure/findfieldstructure/find_field_structure.go @@ -0,0 +1,225 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Finds the structure of a text field in an index. +package findfieldstructure + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type FindFieldStructure struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewFindFieldStructure type alias for index. +type NewFindFieldStructure func() *FindFieldStructure + +// NewFindFieldStructureFunc returns a new instance of FindFieldStructure with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewFindFieldStructureFunc(tp elastictransport.Interface) NewFindFieldStructure { + return func() *FindFieldStructure { + n := New(tp) + + return n + } +} + +// Finds the structure of a text field in an index. 
+// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/find-field-structure.html +func New(tp elastictransport.Interface) *FindFieldStructure { + r := &FindFieldStructure{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *FindFieldStructure) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_text_structure") + path.WriteString("/") + path.WriteString("find_field_structure") + + method = http.MethodGet + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. +func (r FindFieldStructure) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "text_structure.find_field_structure") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "text_structure.find_field_structure") + if reader := instrument.RecordRequestBody(ctx, "text_structure.find_field_structure", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "text_structure.find_field_structure") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the FindFieldStructure query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a findfieldstructure.Response +func (r FindFieldStructure) Do(ctx context.Context) (bool, error) { + return r.IsSuccess(ctx) +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. 
+func (r FindFieldStructure) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "text_structure.find_field_structure") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the FindFieldStructure query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the FindFieldStructure headers map. +func (r *FindFieldStructure) Header(key, value string) *FindFieldStructure { + r.headers.Set(key, value) + + return r +} diff --git a/typedapi/textstructure/findmessagestructure/find_message_structure.go b/typedapi/textstructure/findmessagestructure/find_message_structure.go new file mode 100644 index 0000000000..27ad2775ec --- /dev/null +++ b/typedapi/textstructure/findmessagestructure/find_message_structure.go @@ -0,0 +1,233 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Finds the structure of a list of messages. The messages must contain data +// that is suitable to be ingested into Elasticsearch. +package findmessagestructure + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. +var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type FindMessageStructure struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewFindMessageStructure type alias for index. +type NewFindMessageStructure func() *FindMessageStructure + +// NewFindMessageStructureFunc returns a new instance of FindMessageStructure with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. 
+func NewFindMessageStructureFunc(tp elastictransport.Interface) NewFindMessageStructure { + return func() *FindMessageStructure { + n := New(tp) + + return n + } +} + +// Finds the structure of a list of messages. The messages must contain data +// that is suitable to be ingested into Elasticsearch. +// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/find-message-structure.html +func New(tp elastictransport.Interface) *FindMessageStructure { + r := &FindMessageStructure{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *FindMessageStructure) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_text_structure") + path.WriteString("/") + path.WriteString("find_message_structure") + + method = http.MethodPost + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Content-Type") == "" { + if r.raw != nil { + req.Header.Set("Content-Type", "application/vnd.elasticsearch+json;compatible-with=8") + } + } + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
+func (r FindMessageStructure) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "text_structure.find_message_structure") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "text_structure.find_message_structure") + if reader := instrument.RecordRequestBody(ctx, "text_structure.find_message_structure", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "text_structure.find_message_structure") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the FindMessageStructure query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a findmessagestructure.Response +func (r FindMessageStructure) Do(ctx context.Context) (bool, error) { + return r.IsSuccess(ctx) +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r FindMessageStructure) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "text_structure.find_message_structure") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the FindMessageStructure query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the FindMessageStructure headers map. +func (r *FindMessageStructure) Header(key, value string) *FindMessageStructure { + r.headers.Set(key, value) + + return r +} diff --git a/typedapi/textstructure/findstructure/find_structure.go b/typedapi/textstructure/findstructure/find_structure.go index bafd310d4a..608326b966 100644 --- a/typedapi/textstructure/findstructure/find_structure.go +++ b/typedapi/textstructure/findstructure/find_structure.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Finds the structure of a text file. 
The text file must contain data that is // suitable to be ingested into Elasticsearch. @@ -85,6 +85,8 @@ func New(tp elastictransport.Interface) *FindStructure { headers: make(http.Header), buf: gobytes.NewBuffer(nil), + + req: NewRequest(), } if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { diff --git a/typedapi/textstructure/findstructure/request.go b/typedapi/textstructure/findstructure/request.go index 1d049ff40b..8fa3e2ae82 100644 --- a/typedapi/textstructure/findstructure/request.go +++ b/typedapi/textstructure/findstructure/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package findstructure @@ -26,5 +26,12 @@ import ( // Request holds the request body struct for the package findstructure // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/text_structure/find_structure/FindStructureRequest.ts#L24-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/text_structure/find_structure/FindStructureRequest.ts#L24-L75 type Request = []json.RawMessage + +// NewRequest returns a Request +func NewRequest() *Request { + r := make([]json.RawMessage, 0) + + return &r +} diff --git a/typedapi/textstructure/findstructure/response.go b/typedapi/textstructure/findstructure/response.go index e52a61a582..70e07ad00d 100644 --- a/typedapi/textstructure/findstructure/response.go +++ b/typedapi/textstructure/findstructure/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package findstructure @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package findstructure // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/text_structure/find_structure/FindStructureResponse.ts#L27-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/text_structure/find_structure/FindStructureResponse.ts#L27-L52 type Response struct { Charset string `json:"charset"` ColumnNames []string `json:"column_names,omitempty"` diff --git a/typedapi/textstructure/testgrokpattern/request.go b/typedapi/textstructure/testgrokpattern/request.go index 1581559ba6..7ad2358a08 100644 --- a/typedapi/textstructure/testgrokpattern/request.go +++ b/typedapi/textstructure/testgrokpattern/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package testgrokpattern @@ -27,7 +27,7 @@ import ( // Request holds the request body struct for the package testgrokpattern // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/text_structure/test_grok_pattern/TestGrokPatternRequest.ts#L22-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/text_structure/test_grok_pattern/TestGrokPatternRequest.ts#L22-L43 type Request struct { // GrokPattern Grok pattern to run on the text. @@ -39,6 +39,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/textstructure/testgrokpattern/response.go b/typedapi/textstructure/testgrokpattern/response.go index c7a0ad0634..5de0b4dea4 100644 --- a/typedapi/textstructure/testgrokpattern/response.go +++ b/typedapi/textstructure/testgrokpattern/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package testgrokpattern @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package testgrokpattern // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/text_structure/test_grok_pattern/TestGrokPatternResponse.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/text_structure/test_grok_pattern/TestGrokPatternResponse.ts#L22-L26 type Response struct { Matches []types.MatchedText `json:"matches"` } diff --git a/typedapi/textstructure/testgrokpattern/test_grok_pattern.go b/typedapi/textstructure/testgrokpattern/test_grok_pattern.go index 007fd5a74e..847f5504b7 100644 --- a/typedapi/textstructure/testgrokpattern/test_grok_pattern.go +++ b/typedapi/textstructure/testgrokpattern/test_grok_pattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Tests a Grok pattern on some text. package testgrokpattern @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -300,6 +301,50 @@ func (r *TestGrokPattern) EcsCompatibility(ecscompatibility string) *TestGrokPat return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *TestGrokPattern) ErrorTrace(errortrace bool) *TestGrokPattern { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *TestGrokPattern) FilterPath(filterpaths ...string) *TestGrokPattern { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *TestGrokPattern) Human(human bool) *TestGrokPattern { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *TestGrokPattern) Pretty(pretty bool) *TestGrokPattern { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // GrokPattern Grok pattern to run on the text. // API name: grok_pattern func (r *TestGrokPattern) GrokPattern(grokpattern string) *TestGrokPattern { diff --git a/typedapi/transform/deletetransform/delete_transform.go b/typedapi/transform/deletetransform/delete_transform.go index 6c1f6afcce..a1e848e3ab 100644 --- a/typedapi/transform/deletetransform/delete_transform.go +++ b/typedapi/transform/deletetransform/delete_transform.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Deletes an existing transform. +// Deletes a transform. package deletetransform import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -77,7 +76,7 @@ func NewDeleteTransformFunc(tp elastictransport.Interface) NewDeleteTransform { } } -// Deletes an existing transform. +// Deletes a transform. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/delete-transform.html func New(tp elastictransport.Interface) *DeleteTransform { @@ -261,7 +260,7 @@ func (r DeleteTransform) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -326,3 +325,47 @@ func (r *DeleteTransform) Timeout(duration string) *DeleteTransform { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteTransform) ErrorTrace(errortrace bool) *DeleteTransform { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteTransform) FilterPath(filterpaths ...string) *DeleteTransform { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. 
When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteTransform) Human(human bool) *DeleteTransform { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeleteTransform) Pretty(pretty bool) *DeleteTransform { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/transform/deletetransform/response.go b/typedapi/transform/deletetransform/response.go index 1f310ee0e6..3d56d91494 100644 --- a/typedapi/transform/deletetransform/response.go +++ b/typedapi/transform/deletetransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletetransform // Response holds the response body struct for the package deletetransform // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/delete_transform/DeleteTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/delete_transform/DeleteTransformResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/transform/getnodestats/get_node_stats.go b/typedapi/transform/getnodestats/get_node_stats.go new file mode 100644 index 0000000000..23590237c3 --- /dev/null +++ b/typedapi/transform/getnodestats/get_node_stats.go @@ -0,0 +1,225 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// Retrieves transform usage information for transform nodes. +package getnodestats + +import ( + "context" + "errors" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "github.com/elastic/elastic-transport-go/v8/elastictransport" +) + +// ErrBuildPath is returned in case of missing parameters within the build of the request. 
+var ErrBuildPath = errors.New("cannot build path, check for missing path parameters") + +type GetNodeStats struct { + transport elastictransport.Interface + + headers http.Header + values url.Values + path url.URL + + raw io.Reader + + paramSet int + + spanStarted bool + + instrument elastictransport.Instrumentation +} + +// NewGetNodeStats type alias for index. +type NewGetNodeStats func() *GetNodeStats + +// NewGetNodeStatsFunc returns a new instance of GetNodeStats with the provided transport. +// Used in the index of the library this allows to retrieve every apis in once place. +func NewGetNodeStatsFunc(tp elastictransport.Interface) NewGetNodeStats { + return func() *GetNodeStats { + n := New(tp) + + return n + } +} + +// Retrieves transform usage information for transform nodes. +// +// https://www.elastic.co/guide/en/elasticsearch/reference/current/get-transform-node-stats.html +func New(tp elastictransport.Interface) *GetNodeStats { + r := &GetNodeStats{ + transport: tp, + values: make(url.Values), + headers: make(http.Header), + } + + if instrumented, ok := r.transport.(elastictransport.Instrumented); ok { + if instrument := instrumented.InstrumentationEnabled(); instrument != nil { + r.instrument = instrument + } + } + + return r +} + +// HttpRequest returns the http.Request object built from the +// given parameters. +func (r *GetNodeStats) HttpRequest(ctx context.Context) (*http.Request, error) { + var path strings.Builder + var method string + var req *http.Request + + var err error + + r.path.Scheme = "http" + + switch { + case r.paramSet == 0: + path.WriteString("/") + path.WriteString("_transform") + path.WriteString("/") + path.WriteString("_node_stats") + + method = http.MethodGet + } + + r.path.Path = path.String() + r.path.RawQuery = r.values.Encode() + + if r.path.Path == "" { + return nil, ErrBuildPath + } + + if ctx != nil { + req, err = http.NewRequestWithContext(ctx, method, r.path.String(), r.raw) + } else { + req, err = http.NewRequest(method, r.path.String(), r.raw) + } + + req.Header = r.headers.Clone() + + if req.Header.Get("Accept") == "" { + req.Header.Set("Accept", "application/vnd.elasticsearch+json;compatible-with=8") + } + + if err != nil { + return req, fmt.Errorf("could not build http.Request: %w", err) + } + + return req, nil +} + +// Perform runs the http.Request through the provided transport and returns an http.Response. 
+func (r GetNodeStats) Perform(providedCtx context.Context) (*http.Response, error) { + var ctx context.Context + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + if r.spanStarted == false { + ctx := instrument.Start(providedCtx, "transform.get_node_stats") + defer instrument.Close(ctx) + } + } + if ctx == nil { + ctx = providedCtx + } + + req, err := r.HttpRequest(ctx) + if err != nil { + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return nil, err + } + + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.BeforeRequest(req, "transform.get_node_stats") + if reader := instrument.RecordRequestBody(ctx, "transform.get_node_stats", r.raw); reader != nil { + req.Body = reader + } + } + res, err := r.transport.Perform(req) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.AfterRequest(req, "elasticsearch", "transform.get_node_stats") + } + if err != nil { + localErr := fmt.Errorf("an error happened during the GetNodeStats query execution: %w", err) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, localErr) + } + return nil, localErr + } + + return res, nil +} + +// Do runs the request through the transport, handle the response and returns a getnodestats.Response +func (r GetNodeStats) Do(ctx context.Context) (bool, error) { + return r.IsSuccess(ctx) +} + +// IsSuccess allows to run a query with a context and retrieve the result as a boolean. +// This only exists for endpoints without a request payload and allows for quick control flow. +func (r GetNodeStats) IsSuccess(providedCtx context.Context) (bool, error) { + var ctx context.Context + r.spanStarted = true + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + ctx = instrument.Start(providedCtx, "transform.get_node_stats") + defer instrument.Close(ctx) + } + if ctx == nil { + ctx = providedCtx + } + + res, err := r.Perform(ctx) + + if err != nil { + return false, err + } + io.Copy(io.Discard, res.Body) + err = res.Body.Close() + if err != nil { + return false, err + } + + if res.StatusCode >= 200 && res.StatusCode < 300 { + return true, nil + } + + if res.StatusCode != 404 { + err := fmt.Errorf("an error happened during the GetNodeStats query execution, status code: %d", res.StatusCode) + if instrument, ok := r.instrument.(elastictransport.Instrumentation); ok { + instrument.RecordError(ctx, err) + } + return false, err + } + + return false, nil +} + +// Header set a key, value pair in the GetNodeStats headers map. +func (r *GetNodeStats) Header(key, value string) *GetNodeStats { + r.headers.Set(key, value) + + return r +} diff --git a/typedapi/transform/gettransform/get_transform.go b/typedapi/transform/gettransform/get_transform.go index 6b8d89b4d0..177d689202 100644 --- a/typedapi/transform/gettransform/get_transform.go +++ b/typedapi/transform/gettransform/get_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves configuration information for transforms. 
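The new getnodestats package above has no request body or response struct: its generated Do delegates to IsSuccess and only reports whether GET /_transform/_node_stats answered with a 2xx status. A minimal sketch, assuming an already-configured elastictransport.Interface (for example, the transport of an existing client):

package main

import (
	"context"

	"github.com/elastic/elastic-transport-go/v8/elastictransport"
	"github.com/elastic/go-elasticsearch/v8/typedapi/transform/getnodestats"
)

// transformNodesReachable calls the new node-stats endpoint and reports only
// whether Elasticsearch answered with a 2xx status, mirroring the generated
// Do/IsSuccess behaviour shown above.
func transformNodesReachable(ctx context.Context, tp elastictransport.Interface) (bool, error) {
	return getnodestats.New(tp).Do(ctx)
}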
package gettransform @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -264,7 +263,7 @@ func (r GetTransform) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -344,3 +343,47 @@ func (r *GetTransform) ExcludeGenerated(excludegenerated bool) *GetTransform { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetTransform) ErrorTrace(errortrace bool) *GetTransform { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetTransform) FilterPath(filterpaths ...string) *GetTransform { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetTransform) Human(human bool) *GetTransform { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetTransform) Pretty(pretty bool) *GetTransform { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/transform/gettransform/response.go b/typedapi/transform/gettransform/response.go index 9dfe5bb7dd..4a7ddce256 100644 --- a/typedapi/transform/gettransform/response.go +++ b/typedapi/transform/gettransform/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package gettransform @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettransform // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/get_transform/GetTransformResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/get_transform/GetTransformResponse.ts#L23-L25 type Response struct { Count int64 `json:"count"` Transforms []types.TransformSummary `json:"transforms"` diff --git a/typedapi/transform/gettransformstats/get_transform_stats.go b/typedapi/transform/gettransformstats/get_transform_stats.go index 713aa4a3bb..d168cb8574 100644 --- a/typedapi/transform/gettransformstats/get_transform_stats.go +++ b/typedapi/transform/gettransformstats/get_transform_stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves usage information for transforms. package gettransformstats @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -263,7 +262,7 @@ func (r GetTransformStats) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -341,3 +340,47 @@ func (r *GetTransformStats) Timeout(duration string) *GetTransformStats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetTransformStats) ErrorTrace(errortrace bool) *GetTransformStats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *GetTransformStats) FilterPath(filterpaths ...string) *GetTransformStats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetTransformStats) Human(human bool) *GetTransformStats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetTransformStats) Pretty(pretty bool) *GetTransformStats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/transform/gettransformstats/response.go b/typedapi/transform/gettransformstats/response.go index 4f5ef21420..26ed750899 100644 --- a/typedapi/transform/gettransformstats/response.go +++ b/typedapi/transform/gettransformstats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package gettransformstats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package gettransformstats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/get_transform_stats/GetTransformStatsResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/get_transform_stats/GetTransformStatsResponse.ts#L23-L25 type Response struct { Count int64 `json:"count"` Transforms []types.TransformStats `json:"transforms"` diff --git a/typedapi/transform/previewtransform/preview_transform.go b/typedapi/transform/previewtransform/preview_transform.go index cbd044fdbb..8f59ab244c 100644 --- a/typedapi/transform/previewtransform/preview_transform.go +++ b/typedapi/transform/previewtransform/preview_transform.go @@ -16,9 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Previews a transform. +// +// It returns a maximum of 100 results. The calculations are based on all the +// current data in the source index. It also +// generates a list of mappings and settings for the destination index. These +// values are determined based on the field +// types of the source index and the transform aggregations. package previewtransform import ( @@ -30,6 +36,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -80,6 +87,12 @@ func NewPreviewTransformFunc(tp elastictransport.Interface) NewPreviewTransform // Previews a transform. // +// It returns a maximum of 100 results. The calculations are based on all the +// current data in the source index. It also +// generates a list of mappings and settings for the destination index. These +// values are determined based on the field +// types of the source index and the transform aggregations. +// // https://www.elastic.co/guide/en/elasticsearch/reference/current/preview-transform.html func New(tp elastictransport.Interface) *PreviewTransform { r := &PreviewTransform{ @@ -330,6 +343,50 @@ func (r *PreviewTransform) Timeout(duration string) *PreviewTransform { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PreviewTransform) ErrorTrace(errortrace bool) *PreviewTransform { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PreviewTransform) FilterPath(filterpaths ...string) *PreviewTransform { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. 
When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PreviewTransform) Human(human bool) *PreviewTransform { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PreviewTransform) Pretty(pretty bool) *PreviewTransform { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Description Free text description of the transform. // API name: description func (r *PreviewTransform) Description(description string) *PreviewTransform { diff --git a/typedapi/transform/previewtransform/request.go b/typedapi/transform/previewtransform/request.go index 1c9121524f..d68edc2aab 100644 --- a/typedapi/transform/previewtransform/request.go +++ b/typedapi/transform/previewtransform/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package previewtransform @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package previewtransform // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/preview_transform/PreviewTransformRequest.ts#L33-L107 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/preview_transform/PreviewTransformRequest.ts#L33-L107 type Request struct { // Description Free text description of the transform. @@ -66,6 +66,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/transform/previewtransform/response.go b/typedapi/transform/previewtransform/response.go index f57d8d6dc6..15f64f9b91 100644 --- a/typedapi/transform/previewtransform/response.go +++ b/typedapi/transform/previewtransform/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package previewtransform @@ -28,7 +28,7 @@ import ( // Response holds the response body struct for the package previewtransform // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/preview_transform/PreviewTransformResponse.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/preview_transform/PreviewTransformResponse.ts#L22-L27 type Response struct { GeneratedDestIndex types.IndexState `json:"generated_dest_index"` Preview []json.RawMessage `json:"preview"` diff --git a/typedapi/transform/puttransform/put_transform.go b/typedapi/transform/puttransform/put_transform.go index d90bb46422..720c7c18bd 100644 --- a/typedapi/transform/puttransform/put_transform.go +++ b/typedapi/transform/puttransform/put_transform.go @@ -16,9 +16,42 @@ // under the License. 
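The preview documentation added above (at most 100 result documents, plus deduced mappings and settings for the destination index) maps onto the generated Response struct with its Preview and GeneratedDestIndex fields. A minimal consumption sketch follows; it assumes the *previewtransform.PreviewTransform builder already carries a source and a pivot or latest section configured elsewhere, and that Do returns (*previewtransform.Response, error) like the other typed endpoints.

package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/transform/previewtransform"
)

// previewRows executes a prepared preview request and prints the returned
// sample documents. The builder contents and the Do signature are assumptions.
func previewRows(ctx context.Context, req *previewtransform.PreviewTransform) error {
	res, err := req.Do(ctx)
	if err != nil {
		return err
	}
	// The API returns at most 100 preview documents.
	fmt.Printf("previewed %d documents\n", len(res.Preview))
	for _, doc := range res.Preview {
		fmt.Println(string(doc)) // each entry is raw JSON
	}
	_ = res.GeneratedDestIndex // proposed mappings/settings for the destination index
	return nil
}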
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Instantiates a transform. +// Creates a transform. +// +// A transform copies data from source indices, transforms it, and persists it +// into an entity-centric destination index. You can also think of the +// destination index as a two-dimensional tabular data structure (known as +// a data frame). The ID for each document in the data frame is generated from a +// hash of the entity, so there is a +// unique row per entity. +// +// You must choose either the latest or pivot method for your transform; you +// cannot use both in a single transform. If +// you choose to use the pivot method for your transform, the entities are +// defined by the set of `group_by` fields in +// the pivot object. If you choose to use the latest method, the entities are +// defined by the `unique_key` field values +// in the latest object. +// +// You must have `create_index`, `index`, and `read` privileges on the +// destination index and `read` and +// `view_index_metadata` privileges on the source indices. When Elasticsearch +// security features are enabled, the +// transform remembers which roles the user that created it had at the time of +// creation and uses those same roles. If +// those roles do not have the required privileges on the source and destination +// indices, the transform fails when it +// attempts unauthorized operations. +// +// NOTE: You must use Kibana or this API to create a transform. Do not add a +// transform directly into any +// `.transform-internal*` indices using the Elasticsearch index API. If +// Elasticsearch security features are enabled, do +// not give users any privileges on `.transform-internal*` indices. If you used +// transforms prior to 7.5, also do not +// give users any privileges on `.data-frame-internal*` indices. package puttransform import ( @@ -81,7 +114,40 @@ func NewPutTransformFunc(tp elastictransport.Interface) NewPutTransform { } } -// Instantiates a transform. +// Creates a transform. +// +// A transform copies data from source indices, transforms it, and persists it +// into an entity-centric destination index. You can also think of the +// destination index as a two-dimensional tabular data structure (known as +// a data frame). The ID for each document in the data frame is generated from a +// hash of the entity, so there is a +// unique row per entity. +// +// You must choose either the latest or pivot method for your transform; you +// cannot use both in a single transform. If +// you choose to use the pivot method for your transform, the entities are +// defined by the set of `group_by` fields in +// the pivot object. If you choose to use the latest method, the entities are +// defined by the `unique_key` field values +// in the latest object. +// +// You must have `create_index`, `index`, and `read` privileges on the +// destination index and `read` and +// `view_index_metadata` privileges on the source indices. When Elasticsearch +// security features are enabled, the +// transform remembers which roles the user that created it had at the time of +// creation and uses those same roles. If +// those roles do not have the required privileges on the source and destination +// indices, the transform fails when it +// attempts unauthorized operations. 
+// +// NOTE: You must use Kibana or this API to create a transform. Do not add a +// transform directly into any +// `.transform-internal*` indices using the Elasticsearch index API. If +// Elasticsearch security features are enabled, do +// not give users any privileges on `.transform-internal*` indices. If you used +// transforms prior to 7.5, also do not +// give users any privileges on `.data-frame-internal*` indices. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/put-transform.html func New(tp elastictransport.Interface) *PutTransform { @@ -341,6 +407,50 @@ func (r *PutTransform) Timeout(duration string) *PutTransform { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutTransform) ErrorTrace(errortrace bool) *PutTransform { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutTransform) FilterPath(filterpaths ...string) *PutTransform { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutTransform) Human(human bool) *PutTransform { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutTransform) Pretty(pretty bool) *PutTransform { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Description Free text description of the transform. // API name: description func (r *PutTransform) Description(description string) *PutTransform { diff --git a/typedapi/transform/puttransform/request.go b/typedapi/transform/puttransform/request.go index 9386848fbd..5de7bb76c9 100644 --- a/typedapi/transform/puttransform/request.go +++ b/typedapi/transform/puttransform/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package puttransform @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package puttransform // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/put_transform/PutTransformRequest.ts#L33-L122 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/put_transform/PutTransformRequest.ts#L33-L122 type Request struct { // Description Free text description of the transform. 
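Following the expanded create-transform documentation above, here is a minimal sketch of issuing the request with the setters generated in this diff (Description, Timeout, FilterPath). It assumes the *puttransform.PutTransform builder already has its transform ID, source, destination, and exactly one of pivot or latest configured elsewhere, and that Do returns (*puttransform.Response, error) — both assumptions, not shown in this hunk.

package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/transform/puttransform"
)

// createTransform submits a prepared PUT _transform request and verifies the
// acknowledgement. The description and timeout values are illustrative.
func createTransform(ctx context.Context, req *puttransform.PutTransform) error {
	res, err := req.
		Description("hourly rollup of ecommerce orders"). // free-text description
		Timeout("30s").                                    // how long to wait for the create to complete
		FilterPath("acknowledged").                        // trim the response to the field read below
		Do(ctx)
	if err != nil {
		return err
	}
	if !res.Acknowledged {
		return fmt.Errorf("put transform was not acknowledged")
	}
	return nil
}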
@@ -70,6 +70,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/transform/puttransform/response.go b/typedapi/transform/puttransform/response.go index 718d817bb9..0cf138e931 100644 --- a/typedapi/transform/puttransform/response.go +++ b/typedapi/transform/puttransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package puttransform // Response holds the response body struct for the package puttransform // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/put_transform/PutTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/put_transform/PutTransformResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/transform/resettransform/reset_transform.go b/typedapi/transform/resettransform/reset_transform.go index 3e14ded529..76c430a28c 100644 --- a/typedapi/transform/resettransform/reset_transform.go +++ b/typedapi/transform/resettransform/reset_transform.go @@ -16,9 +16,12 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Resets an existing transform. +// Resets a transform. +// Before you can reset it, you must stop it; alternatively, use the `force` +// query parameter. +// If the destination index was created by the transform, it is deleted. package resettransform import ( @@ -27,7 +30,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -77,7 +79,10 @@ func NewResetTransformFunc(tp elastictransport.Interface) NewResetTransform { } } -// Resets an existing transform. +// Resets a transform. +// Before you can reset it, you must stop it; alternatively, use the `force` +// query parameter. +// If the destination index was created by the transform, it is deleted. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/reset-transform.html func New(tp elastictransport.Interface) *ResetTransform { @@ -263,7 +268,7 @@ func (r ResetTransform) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -312,3 +317,47 @@ func (r *ResetTransform) Force(force bool) *ResetTransform { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ResetTransform) ErrorTrace(errortrace bool) *ResetTransform { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *ResetTransform) FilterPath(filterpaths ...string) *ResetTransform { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ResetTransform) Human(human bool) *ResetTransform { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ResetTransform) Pretty(pretty bool) *ResetTransform { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/transform/resettransform/response.go b/typedapi/transform/resettransform/response.go index 63001e9e7a..fe2bcdaa36 100644 --- a/typedapi/transform/resettransform/response.go +++ b/typedapi/transform/resettransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package resettransform // Response holds the response body struct for the package resettransform // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/reset_transform/ResetTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/reset_transform/ResetTransformResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/transform/schedulenowtransform/response.go b/typedapi/transform/schedulenowtransform/response.go index 7917f8ff0a..226d36d1d2 100644 --- a/typedapi/transform/schedulenowtransform/response.go +++ b/typedapi/transform/schedulenowtransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package schedulenowtransform // Response holds the response body struct for the package schedulenowtransform // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/schedule_now_transform/ScheduleNowTransformResponse.ts#L21-L23 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/schedule_now_transform/ScheduleNowTransformResponse.ts#L21-L23 type Response struct { // Acknowledged For a successful response, this value is always true. 
On failure, an diff --git a/typedapi/transform/schedulenowtransform/schedule_now_transform.go b/typedapi/transform/schedulenowtransform/schedule_now_transform.go index 78eb353d03..1013862e70 100644 --- a/typedapi/transform/schedulenowtransform/schedule_now_transform.go +++ b/typedapi/transform/schedulenowtransform/schedule_now_transform.go @@ -16,9 +16,16 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Schedules now a transform. +// +// If you _schedule_now a transform, it will process the new data instantly, +// without waiting for the configured frequency interval. After _schedule_now +// API is called, +// the transform will be processed again at now + frequency unless _schedule_now +// API +// is called again in the meantime. package schedulenowtransform import ( @@ -27,9 +34,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -78,6 +85,13 @@ func NewScheduleNowTransformFunc(tp elastictransport.Interface) NewScheduleNowTr // Schedules now a transform. // +// If you _schedule_now a transform, it will process the new data instantly, +// without waiting for the configured frequency interval. After _schedule_now +// API is called, +// the transform will be processed again at now + frequency unless _schedule_now +// API +// is called again in the meantime. +// // https://www.elastic.co/guide/en/elasticsearch/reference/current/schedule-now-transform.html func New(tp elastictransport.Interface) *ScheduleNowTransform { r := &ScheduleNowTransform{ @@ -268,7 +282,7 @@ func (r ScheduleNowTransform) IsSuccess(providedCtx context.Context) (bool, erro if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -312,3 +326,47 @@ func (r *ScheduleNowTransform) Timeout(duration string) *ScheduleNowTransform { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ScheduleNowTransform) ErrorTrace(errortrace bool) *ScheduleNowTransform { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ScheduleNowTransform) FilterPath(filterpaths ...string) *ScheduleNowTransform { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ScheduleNowTransform) Human(human bool) *ScheduleNowTransform { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *ScheduleNowTransform) Pretty(pretty bool) *ScheduleNowTransform { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/transform/starttransform/response.go b/typedapi/transform/starttransform/response.go index 7a2b17bdd1..e470672d16 100644 --- a/typedapi/transform/starttransform/response.go +++ b/typedapi/transform/starttransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package starttransform // Response holds the response body struct for the package starttransform // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/start_transform/StartTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/start_transform/StartTransformResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/transform/starttransform/start_transform.go b/typedapi/transform/starttransform/start_transform.go index 2a643876f3..ab7b9328f0 100644 --- a/typedapi/transform/starttransform/start_transform.go +++ b/typedapi/transform/starttransform/start_transform.go @@ -16,9 +16,36 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Starts one or more transforms. +// Starts a transform. +// +// When you start a transform, it creates the destination index if it does not +// already exist. The `number_of_shards` is +// set to `1` and the `auto_expand_replicas` is set to `0-1`. If it is a pivot +// transform, it deduces the mapping +// definitions for the destination index from the source indices and the +// transform aggregations. If fields in the +// destination index are derived from scripts (as in the case of +// `scripted_metric` or `bucket_script` aggregations), +// the transform uses dynamic mappings unless an index template exists. If it is +// a latest transform, it does not deduce +// mapping definitions; it uses dynamic mappings. To use explicit mappings, +// create the destination index before you +// start the transform. Alternatively, you can create an index template, though +// it does not affect the deduced mappings +// in a pivot transform. +// +// When the transform starts, a series of validations occur to ensure its +// success. If you deferred validation when you +// created the transform, they occur when you start the transform—​with the +// exception of privilege checks. When +// Elasticsearch security features are enabled, the transform remembers which +// roles the user that created it had at the +// time of creation and uses those same roles. If those roles do not have the +// required privileges on the source and +// destination indices, the transform fails when it attempts unauthorized +// operations. 
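The start-transform documentation above pairs with the generated IsSuccess helper that appears further down in this diff, which runs the request and reports only the HTTP status. A minimal sketch, assuming the *starttransform.StartTransform builder (with its transform ID) was built elsewhere:

package main

import (
	"context"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/transform/starttransform"
)

// startTransform starts a prepared transform and treats any non-2xx status as
// an error, using the boolean IsSuccess flavour of the generated API.
func startTransform(ctx context.Context, req *starttransform.StartTransform) error {
	started, err := req.IsSuccess(ctx)
	if err != nil {
		return err
	}
	if !started {
		return fmt.Errorf("start transform did not return a 2xx status")
	}
	return nil
}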
package starttransform import ( @@ -27,9 +54,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -76,7 +103,34 @@ func NewStartTransformFunc(tp elastictransport.Interface) NewStartTransform { } } -// Starts one or more transforms. +// Starts a transform. +// +// When you start a transform, it creates the destination index if it does not +// already exist. The `number_of_shards` is +// set to `1` and the `auto_expand_replicas` is set to `0-1`. If it is a pivot +// transform, it deduces the mapping +// definitions for the destination index from the source indices and the +// transform aggregations. If fields in the +// destination index are derived from scripts (as in the case of +// `scripted_metric` or `bucket_script` aggregations), +// the transform uses dynamic mappings unless an index template exists. If it is +// a latest transform, it does not deduce +// mapping definitions; it uses dynamic mappings. To use explicit mappings, +// create the destination index before you +// start the transform. Alternatively, you can create an index template, though +// it does not affect the deduced mappings +// in a pivot transform. +// +// When the transform starts, a series of validations occur to ensure its +// success. If you deferred validation when you +// created the transform, they occur when you start the transform—​with the +// exception of privilege checks. When +// Elasticsearch security features are enabled, the transform remembers which +// roles the user that created it had at the +// time of creation and uses those same roles. If those roles do not have the +// required privileges on the source and +// destination indices, the transform fails when it attempts unauthorized +// operations. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/start-transform.html func New(tp elastictransport.Interface) *StartTransform { @@ -262,7 +316,7 @@ func (r StartTransform) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -317,3 +371,47 @@ func (r *StartTransform) From(from string) *StartTransform { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *StartTransform) ErrorTrace(errortrace bool) *StartTransform { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *StartTransform) FilterPath(filterpaths ...string) *StartTransform { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
+// API name: human +func (r *StartTransform) Human(human bool) *StartTransform { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *StartTransform) Pretty(pretty bool) *StartTransform { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/transform/stoptransform/response.go b/typedapi/transform/stoptransform/response.go index ae85a1d344..0e668e30f8 100644 --- a/typedapi/transform/stoptransform/response.go +++ b/typedapi/transform/stoptransform/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stoptransform // Response holds the response body struct for the package stoptransform // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/stop_transform/StopTransformResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/stop_transform/StopTransformResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/transform/stoptransform/stop_transform.go b/typedapi/transform/stoptransform/stop_transform.go index 95521616e7..899d3ee03c 100644 --- a/typedapi/transform/stoptransform/stop_transform.go +++ b/typedapi/transform/stoptransform/stop_transform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Stops one or more transforms. package stoptransform @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -263,7 +262,7 @@ func (r StopTransform) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -360,3 +359,47 @@ func (r *StopTransform) WaitForCompletion(waitforcompletion bool) *StopTransform return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *StopTransform) ErrorTrace(errortrace bool) *StopTransform { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *StopTransform) FilterPath(filterpaths ...string) *StopTransform { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. 
When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *StopTransform) Human(human bool) *StopTransform { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *StopTransform) Pretty(pretty bool) *StopTransform { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/transform/updatetransform/request.go b/typedapi/transform/updatetransform/request.go index f6c3b1d9aa..46d5d9a05b 100644 --- a/typedapi/transform/updatetransform/request.go +++ b/typedapi/transform/updatetransform/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatetransform @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package updatetransform // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/update_transform/UpdateTransformRequest.ts#L31-L105 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/update_transform/UpdateTransformRequest.ts#L31-L105 type Request struct { // Description Free text description of the transform. @@ -49,7 +49,7 @@ type Request struct { Meta_ types.Metadata `json:"_meta,omitempty"` // RetentionPolicy Defines a retention policy for the transform. Data that meets the defined // criteria is deleted from the destination index. - RetentionPolicy types.RetentionPolicyContainer `json:"retention_policy,omitempty"` + RetentionPolicy *types.RetentionPolicyContainer `json:"retention_policy,omitempty"` // Settings Defines optional transform settings. Settings *types.Settings `json:"settings,omitempty"` // Source The source of the data for the transform. @@ -61,6 +61,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } diff --git a/typedapi/transform/updatetransform/response.go b/typedapi/transform/updatetransform/response.go index bf994ada83..93851b982c 100644 --- a/typedapi/transform/updatetransform/response.go +++ b/typedapi/transform/updatetransform/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package updatetransform @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package updatetransform // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/update_transform/UpdateTransformResponse.ts#L33-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/update_transform/UpdateTransformResponse.ts#L33-L51 type Response struct { Authorization *types.TransformAuthorization `json:"authorization,omitempty"` CreateTime int64 `json:"create_time"` diff --git a/typedapi/transform/updatetransform/update_transform.go b/typedapi/transform/updatetransform/update_transform.go index 7edb963e2d..81bd3687c8 100644 --- a/typedapi/transform/updatetransform/update_transform.go +++ b/typedapi/transform/updatetransform/update_transform.go @@ -16,9 +16,19 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Updates certain properties of a transform. +// +// All updated properties except `description` do not take effect until after +// the transform starts the next checkpoint, +// thus there is data consistency in each checkpoint. To use this API, you must +// have `read` and `view_index_metadata` +// privileges for the source indices. You must also have `index` and `read` +// privileges for the destination index. When +// Elasticsearch security features are enabled, the transform remembers which +// roles the user who updated it had at the +// time of update and runs with those privileges. package updatetransform import ( @@ -83,6 +93,16 @@ func NewUpdateTransformFunc(tp elastictransport.Interface) NewUpdateTransform { // Updates certain properties of a transform. // +// All updated properties except `description` do not take effect until after +// the transform starts the next checkpoint, +// thus there is data consistency in each checkpoint. To use this API, you must +// have `read` and `view_index_metadata` +// privileges for the source indices. You must also have `index` and `read` +// privileges for the destination index. When +// Elasticsearch security features are enabled, the transform remembers which +// roles the user who updated it had at the +// time of update and runs with those privileges. +// // https://www.elastic.co/guide/en/elasticsearch/reference/current/update-transform.html func New(tp elastictransport.Interface) *UpdateTransform { r := &UpdateTransform{ @@ -334,6 +354,50 @@ func (r *UpdateTransform) Timeout(duration string) *UpdateTransform { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpdateTransform) ErrorTrace(errortrace bool) *UpdateTransform { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *UpdateTransform) FilterPath(filterpaths ...string) *UpdateTransform { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *UpdateTransform) Human(human bool) *UpdateTransform { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpdateTransform) Pretty(pretty bool) *UpdateTransform { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // Description Free text description of the transform. // API name: description func (r *UpdateTransform) Description(description string) *UpdateTransform { @@ -375,7 +439,7 @@ func (r *UpdateTransform) Meta_(metadata types.Metadata) *UpdateTransform { // criteria is deleted from the destination index. // API name: retention_policy func (r *UpdateTransform) RetentionPolicy(retentionpolicy types.RetentionPolicyContainer) *UpdateTransform { - r.req.RetentionPolicy = retentionpolicy + r.req.RetentionPolicy = &retentionpolicy return r } diff --git a/typedapi/transform/upgradetransforms/response.go b/typedapi/transform/upgradetransforms/response.go index 4738d3a0ce..6e65e34ca4 100644 --- a/typedapi/transform/upgradetransforms/response.go +++ b/typedapi/transform/upgradetransforms/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package upgradetransforms // Response holds the response body struct for the package upgradetransforms // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/upgrade_transforms/UpgradeTransformsResponse.ts#L25-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/upgrade_transforms/UpgradeTransformsResponse.ts#L25-L34 type Response struct { // NeedsUpdate The number of transforms that need to be upgraded. diff --git a/typedapi/transform/upgradetransforms/upgrade_transforms.go b/typedapi/transform/upgradetransforms/upgrade_transforms.go index c29b2ac7e5..d7cc5999cb 100644 --- a/typedapi/transform/upgradetransforms/upgrade_transforms.go +++ b/typedapi/transform/upgradetransforms/upgrade_transforms.go @@ -16,9 +16,18 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Upgrades all transforms. +// This API identifies transforms that have a legacy configuration format and +// upgrades them to the latest version. 
It +// also cleans up the internal data structures that store the transform state +// and checkpoints. The upgrade does not +// affect the source and destination indices. The upgrade also does not affect +// the roles that transforms use when +// Elasticsearch security features are enabled; the role used to read source +// data and write to the destination index +// remains unchanged. package upgradetransforms import ( @@ -27,7 +36,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -70,6 +78,15 @@ func NewUpgradeTransformsFunc(tp elastictransport.Interface) NewUpgradeTransform } // Upgrades all transforms. +// This API identifies transforms that have a legacy configuration format and +// upgrades them to the latest version. It +// also cleans up the internal data structures that store the transform state +// and checkpoints. The upgrade does not +// affect the source and destination indices. The upgrade also does not affect +// the roles that transforms use when +// Elasticsearch security features are enabled; the role used to read source +// data and write to the destination index +// remains unchanged. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/upgrade-transforms.html func New(tp elastictransport.Interface) *UpgradeTransforms { @@ -255,7 +272,7 @@ func (r UpgradeTransforms) IsSuccess(providedCtx context.Context) (bool, error) if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +317,47 @@ func (r *UpgradeTransforms) Timeout(duration string) *UpgradeTransforms { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *UpgradeTransforms) ErrorTrace(errortrace bool) *UpgradeTransforms { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *UpgradeTransforms) FilterPath(filterpaths ...string) *UpgradeTransforms { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *UpgradeTransforms) Human(human bool) *UpgradeTransforms { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *UpgradeTransforms) Pretty(pretty bool) *UpgradeTransforms { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/types/acknowledgement.go b/typedapi/types/acknowledgement.go index 16e3cdc469..6eb50e5e34 100644 --- a/typedapi/types/acknowledgement.go +++ b/typedapi/types/acknowledgement.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Acknowledgement type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/post/types.ts#L20-L23 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/post/types.ts#L20-L23 type Acknowledgement struct { License []string `json:"license"` Message string `json:"message"` diff --git a/typedapi/types/acknowledgestate.go b/typedapi/types/acknowledgestate.go index e76a8c2371..ad1524bd4d 100644 --- a/typedapi/types/acknowledgestate.go +++ b/typedapi/types/acknowledgestate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // AcknowledgeState type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Action.ts#L115-L118 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Action.ts#L115-L118 type AcknowledgeState struct { State acknowledgementoptions.AcknowledgementOptions `json:"state"` Timestamp DateTime `json:"timestamp"` diff --git a/typedapi/types/actionstatus.go b/typedapi/types/actionstatus.go index 64fb37b427..0dd88114c7 100644 --- a/typedapi/types/actionstatus.go +++ b/typedapi/types/actionstatus.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ActionStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Action.ts#L131-L136 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Action.ts#L131-L136 type ActionStatus struct { Ack AcknowledgeState `json:"ack"` LastExecution *ExecutionState `json:"last_execution,omitempty"` diff --git a/typedapi/types/activationstate.go b/typedapi/types/activationstate.go index e4d3261dee..e6675a6244 100644 --- a/typedapi/types/activationstate.go +++ b/typedapi/types/activationstate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ActivationState type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Activation.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Activation.ts#L24-L27 type ActivationState struct { Active bool `json:"active"` Timestamp DateTime `json:"timestamp"` @@ -53,7 +53,7 @@ func (s *ActivationState) UnmarshalJSON(data []byte) error { switch t { case "active": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/activationstatus.go b/typedapi/types/activationstatus.go index 515f967d50..2ec851cfec 100644 --- a/typedapi/types/activationstatus.go +++ b/typedapi/types/activationstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ActivationStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Activation.ts#L29-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Activation.ts#L29-L33 type ActivationStatus struct { Actions WatcherStatusActions `json:"actions"` State ActivationState `json:"state"` diff --git a/typedapi/types/adaptiveselection.go b/typedapi/types/adaptiveselection.go index 83f24237ce..e466609098 100644 --- a/typedapi/types/adaptiveselection.go +++ b/typedapi/types/adaptiveselection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AdaptiveSelection type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L403-L432 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L403-L432 type AdaptiveSelection struct { // AvgQueueSize The exponentially weighted moving average queue size of search requests on // the keyed node. 
@@ -71,7 +71,7 @@ func (s *AdaptiveSelection) UnmarshalJSON(data []byte) error { switch t { case "avg_queue_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *AdaptiveSelection) UnmarshalJSON(data []byte) error { } case "avg_response_time_ns": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,7 +111,7 @@ func (s *AdaptiveSelection) UnmarshalJSON(data []byte) error { } case "avg_service_time_ns": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -126,7 +126,7 @@ func (s *AdaptiveSelection) UnmarshalJSON(data []byte) error { } case "outgoing_searches": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/addaction.go b/typedapi/types/addaction.go index 47b4ad20ac..868164846a 100644 --- a/typedapi/types/addaction.go +++ b/typedapi/types/addaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AddAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/update_aliases/types.ts#L41-L95 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/update_aliases/types.ts#L41-L95 type AddAction struct { // Alias Alias for the action. // Index alias names support date math. @@ -134,7 +134,7 @@ func (s *AddAction) UnmarshalJSON(data []byte) error { } case "is_hidden": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -148,7 +148,7 @@ func (s *AddAction) UnmarshalJSON(data []byte) error { } case "is_write_index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -162,7 +162,7 @@ func (s *AddAction) UnmarshalJSON(data []byte) error { } case "must_exist": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/adjacencymatrixaggregate.go b/typedapi/types/adjacencymatrixaggregate.go index 8904db4389..47b320fc88 100644 --- a/typedapi/types/adjacencymatrixaggregate.go +++ b/typedapi/types/adjacencymatrixaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // AdjacencyMatrixAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L573-L575 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L577-L579 type AdjacencyMatrixAggregate struct { Buckets BucketsAdjacencyMatrixBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/adjacencymatrixaggregation.go b/typedapi/types/adjacencymatrixaggregation.go index 2420bbf954..7af275d493 100644 --- a/typedapi/types/adjacencymatrixaggregation.go +++ b/typedapi/types/adjacencymatrixaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,13 +31,13 @@ import ( // AdjacencyMatrixAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L57-L63 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L55-L65 type AdjacencyMatrixAggregation struct { // Filters Filters used to create buckets. // At least one filter is required. Filters map[string]Query `json:"filters,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + // Separator Separator used to concatenate filter names. Defaults to &. + Separator *string `json:"separator,omitempty"` } func (s *AdjacencyMatrixAggregation) UnmarshalJSON(data []byte) error { @@ -63,22 +63,17 @@ func (s *AdjacencyMatrixAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Filters", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": + case "separator": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) + return fmt.Errorf("%s | %w", "Separator", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) if err != nil { o = string(tmp[:]) } - s.Name = &o + s.Separator = &o } } diff --git a/typedapi/types/adjacencymatrixbucket.go b/typedapi/types/adjacencymatrixbucket.go index 4e1fbee009..14f8fd7d3d 100644 --- a/typedapi/types/adjacencymatrixbucket.go +++ b/typedapi/types/adjacencymatrixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // AdjacencyMatrixBucket type. 
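// --- Editor's usage sketch (not part of the generated diff) ---
// AdjacencyMatrixAggregation now exposes Separator (replacing the removed
// Meta/Name fields), matching the updated specification. A small sketch of
// assembling and serializing the aggregation with a custom separator; the
// field/filter contents are made up, and the struct field names follow this
// diff.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	sep := "|" // custom separator instead of the default "&"
	agg := types.Aggregations{
		AdjacencyMatrix: &types.AdjacencyMatrixAggregation{
			Filters: map[string]types.Query{
				"errors":   {Match: map[string]types.MatchQuery{"level": {Query: "error"}}},
				"warnings": {Match: map[string]types.MatchQuery{"level": {Query: "warn"}}},
			},
			Separator: &sep,
		},
	}

	body, _ := json.Marshal(agg)
	// Expected shape: {"adjacency_matrix":{"filters":{...},"separator":"|"}}
	fmt.Println(string(body))
}
// --- end sketch ---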
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L577-L579 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L581-L583 type AdjacencyMatrixBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *AdjacencyMatrixBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -526,7 +526,7 @@ func (s *AdjacencyMatrixBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -576,7 +576,7 @@ func (s *AdjacencyMatrixBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -586,7 +586,7 @@ func (s *AdjacencyMatrixBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -603,7 +603,7 @@ func (s *AdjacencyMatrixBucket) UnmarshalJSON(data []byte) error { func (s AdjacencyMatrixBucket) MarshalJSON() ([]byte, error) { type opt AdjacencyMatrixBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/aggregate.go b/typedapi/types/aggregate.go index adced94e03..dcc63cfa2f 100644 --- a/typedapi/types/aggregate.go +++ b/typedapi/types/aggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -92,5 +92,5 @@ package types // MatrixStatsAggregate // GeoLineAggregate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L38-L123 -type Aggregate interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L38-L123 +type Aggregate any diff --git a/typedapi/types/aggregatemetricdoubleproperty.go b/typedapi/types/aggregatemetricdoubleproperty.go index b65bde5dfd..140d72efed 100644 --- a/typedapi/types/aggregatemetricdoubleproperty.go +++ b/typedapi/types/aggregatemetricdoubleproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // AggregateMetricDoubleProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/complex.ts#L60-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/complex.ts#L61-L66 type AggregateMetricDoubleProperty struct { DefaultMetric string `json:"default_metric"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` @@ -87,7 +87,7 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -108,7 +108,7 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -198,12 +198,6 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -222,6 +216,18 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -378,6 +384,12 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -389,7 +401,7 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -423,7 +435,7 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -444,7 +456,7 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -534,12 +546,6 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -558,6 +564,18 @@ func (s *AggregateMetricDoubleProperty) 
UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -714,6 +732,12 @@ func (s *AggregateMetricDoubleProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { diff --git a/typedapi/types/aggregateorder.go b/typedapi/types/aggregateorder.go index b20d16e965..38b5af6ffe 100644 --- a/typedapi/types/aggregateorder.go +++ b/typedapi/types/aggregateorder.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]sortorder.SortOrder // []map[string]sortorder.SortOrder // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L976-L978 -type AggregateOrder interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L983-L985 +type AggregateOrder any diff --git a/typedapi/types/aggregateoutput.go b/typedapi/types/aggregateoutput.go index ba9e09d9c0..b94ea9fde9 100644 --- a/typedapi/types/aggregateoutput.go +++ b/typedapi/types/aggregateoutput.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // AggregateOutput type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/types.ts#L101-L106 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/types.ts#L101-L106 type AggregateOutput struct { Exponent *Weights `json:"exponent,omitempty"` LogisticRegression *Weights `json:"logistic_regression,omitempty"` diff --git a/typedapi/types/aggregation.go b/typedapi/types/aggregation.go index 1a0e057d87..7932e26f70 100644 --- a/typedapi/types/aggregation.go +++ b/typedapi/types/aggregation.go @@ -16,62 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "io" - "strconv" -) - // Aggregation type. 
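// --- Editor's usage sketch (not part of the generated diff) ---
// The property union decoded above now recognizes "semantic_text" and
// "icu_collation_keyword" (and the "{dynamic_type}" marker replaces
// "{dynamic_property}"). A sketch of how a mapping round-trips into the
// concrete typed struct, assuming types.TypeMapping uses the same dispatch
// pattern shown in this diff; the JSON document is made up and minimal.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"properties":{"body":{"type":"semantic_text"}}}`)

	var m types.TypeMapping
	if err := json.Unmarshal(raw, &m); err != nil {
		panic(err)
	}

	// Property is still an open union (now spelled `any`), so callers switch
	// on the concrete pointer type produced by the decoder.
	switch p := m.Properties["body"].(type) {
	case *types.SemanticTextProperty:
		fmt.Printf("decoded a semantic_text property: %#v\n", p)
	default:
		fmt.Printf("unexpected property type %T\n", p)
	}
}
// --- end sketch ---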
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregation.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregation.ts#L20-L20 type Aggregation struct { - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` -} - -func (s *Aggregation) UnmarshalJSON(data []byte) error { - - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - - } - } - return nil } // NewAggregation returns a Aggregation. diff --git a/typedapi/types/aggregationbreakdown.go b/typedapi/types/aggregationbreakdown.go index 699a81695b..f63e1a1e60 100644 --- a/typedapi/types/aggregationbreakdown.go +++ b/typedapi/types/aggregationbreakdown.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AggregationBreakdown type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L23-L36 type AggregationBreakdown struct { BuildAggregation int64 `json:"build_aggregation"` BuildAggregationCount int64 `json:"build_aggregation_count"` @@ -63,7 +63,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { switch t { case "build_aggregation": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -78,7 +78,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { } case "build_aggregation_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -93,7 +93,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { } case "build_leaf_collector": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +108,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { } case "build_leaf_collector_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -123,7 +123,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { } case "collect": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { } case "collect_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -153,7 +153,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { } case "initialize": - var tmp interface{} + var tmp any 
dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -168,7 +168,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { } case "initialize_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -183,7 +183,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { } case "post_collection": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -198,7 +198,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { } case "post_collection_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -213,7 +213,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { } case "reduce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -228,7 +228,7 @@ func (s *AggregationBreakdown) UnmarshalJSON(data []byte) error { } case "reduce_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/aggregationprofile.go b/typedapi/types/aggregationprofile.go index ea1e684777..5afdd49adb 100644 --- a/typedapi/types/aggregationprofile.go +++ b/typedapi/types/aggregationprofile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AggregationProfile type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L77-L84 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L77-L84 type AggregationProfile struct { Breakdown AggregationBreakdown `json:"breakdown"` Children []AggregationProfile `json:"children,omitempty"` diff --git a/typedapi/types/aggregationprofiledebug.go b/typedapi/types/aggregationprofiledebug.go index 4c9b285179..0ec540186f 100644 --- a/typedapi/types/aggregationprofiledebug.go +++ b/typedapi/types/aggregationprofiledebug.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AggregationProfileDebug type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L39-L68 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L39-L68 type AggregationProfileDebug struct { BuiltBuckets *int `json:"built_buckets,omitempty"` CharsFetched *int `json:"chars_fetched,omitempty"` @@ -80,7 +80,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "built_buckets": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "chars_fetched": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -112,7 +112,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "collect_analyzed_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -128,7 +128,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "collect_analyzed_ns": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -178,7 +178,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "empty_collectors_used": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -194,7 +194,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "extract_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -210,7 +210,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "extract_ns": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -230,7 +230,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { } case "has_filter": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -257,7 +257,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "numeric_collectors_used": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -273,7 +273,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "ordinals_collectors_overhead_too_high": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -289,7 +289,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "ordinals_collectors_used": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -317,7 +317,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "segments_collected": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -333,7 +333,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "segments_counted": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -349,7 +349,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "segments_with_deleted_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -365,7 +365,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "segments_with_doc_count_field": - var tmp interface{} + var tmp any dec.Decode(&tmp) 
switch v := tmp.(type) { case string: @@ -381,7 +381,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "segments_with_multi_valued_ords": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -397,7 +397,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "segments_with_single_valued_ords": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -413,7 +413,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "string_hashing_collectors_used": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -429,7 +429,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "surviving_buckets": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -445,7 +445,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "total_buckets": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -461,7 +461,7 @@ func (s *AggregationProfileDebug) UnmarshalJSON(data []byte) error { case "values_fetched": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/aggregationprofiledelegatedebugfilter.go b/typedapi/types/aggregationprofiledelegatedebugfilter.go index e764442117..da9088ae58 100644 --- a/typedapi/types/aggregationprofiledelegatedebugfilter.go +++ b/typedapi/types/aggregationprofiledelegatedebugfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AggregationProfileDelegateDebugFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L70-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L70-L75 type AggregationProfileDelegateDebugFilter struct { Query *string `json:"query,omitempty"` ResultsFromMetadata *int `json:"results_from_metadata,omitempty"` @@ -68,7 +68,7 @@ func (s *AggregationProfileDelegateDebugFilter) UnmarshalJSON(data []byte) error case "results_from_metadata": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,7 +84,7 @@ func (s *AggregationProfileDelegateDebugFilter) UnmarshalJSON(data []byte) error case "segments_counted_in_constant_time": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/aggregationrange.go b/typedapi/types/aggregationrange.go index 313d35c9ef..877f2c1d8b 100644 --- a/typedapi/types/aggregationrange.go +++ b/typedapi/types/aggregationrange.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // AggregationRange type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L672-L685 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L674-L687 type AggregationRange struct { // From Start of the range (inclusive). - From string `json:"from,omitempty"` + From *Float64 `json:"from,omitempty"` // Key Custom key to return the range with. Key *string `json:"key,omitempty"` // To End of the range (exclusive). - To string `json:"to,omitempty"` + To *Float64 `json:"to,omitempty"` } func (s *AggregationRange) UnmarshalJSON(data []byte) error { @@ -57,16 +57,20 @@ func (s *AggregationRange) UnmarshalJSON(data []byte) error { switch t { case "from": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "From", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return fmt.Errorf("%s | %w", "From", err) + } + f := Float64(value) + s.From = &f + case float64: + f := Float64(v) + s.From = &f } - s.From = o case "key": var tmp json.RawMessage @@ -81,16 +85,20 @@ func (s *AggregationRange) UnmarshalJSON(data []byte) error { s.Key = &o case "to": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "To", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return fmt.Errorf("%s | %w", "To", err) + } + f := Float64(value) + s.To = &f + case float64: + f := Float64(v) + s.To = &f } - s.To = o } } diff --git a/typedapi/types/aggregations.go b/typedapi/types/aggregations.go index 0358e70e47..a6f7a99586 100644 --- a/typedapi/types/aggregations.go +++ b/typedapi/types/aggregations.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Aggregations type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/AggregationContainer.ts#L106-L515 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/AggregationContainer.ts#L105-L514 type Aggregations struct { // AdjacencyMatrix A bucket aggregation returning a form of adjacency matrix. // The request provides a collection of named filter expressions, similar to the diff --git a/typedapi/types/alias.go b/typedapi/types/alias.go index ee552d2376..41da033b4d 100644 --- a/typedapi/types/alias.go +++ b/typedapi/types/alias.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
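// --- Editor's usage sketch (not part of the generated diff) ---
// AggregationRange.From/To change from strings to optional Float64 values in
// the hunk above, so numeric bounds are now built with pointers and serialized
// as numbers. A small sketch of a three-bucket numeric range aggregation; the
// field name and bounds are made up, and types.RangeAggregation is assumed to
// keep its usual Field/Ranges shape.
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	field := "response_time_ms"
	fast := types.Float64(100)
	slow := types.Float64(1000)

	agg := types.Aggregations{
		Range: &types.RangeAggregation{
			Field: &field,
			Ranges: []types.AggregationRange{
				{To: &fast},              // everything below 100
				{From: &fast, To: &slow}, // 100 (inclusive) to 1000 (exclusive)
				{From: &slow},            // 1000 and above
			},
		},
	}

	body, _ := json.Marshal(agg)
	fmt.Println(string(body)) // bounds are now emitted as numbers, not strings
}
// --- end sketch ---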
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Alias type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/Alias.ts#L23-L53 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/Alias.ts#L23-L53 type Alias struct { // Filter Query used to limit documents the alias can access. Filter *Query `json:"filter,omitempty"` @@ -76,7 +76,7 @@ func (s *Alias) UnmarshalJSON(data []byte) error { } case "is_hidden": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *Alias) UnmarshalJSON(data []byte) error { } case "is_write_index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/aliasdefinition.go b/typedapi/types/aliasdefinition.go index 204fbaebea..a6154a99c8 100644 --- a/typedapi/types/aliasdefinition.go +++ b/typedapi/types/aliasdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AliasDefinition type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/AliasDefinition.ts#L22-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/AliasDefinition.ts#L22-L54 type AliasDefinition struct { // Filter Query used to limit documents the alias can access. Filter *Query `json:"filter,omitempty"` @@ -83,7 +83,7 @@ func (s *AliasDefinition) UnmarshalJSON(data []byte) error { s.IndexRouting = &o case "is_hidden": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -97,7 +97,7 @@ func (s *AliasDefinition) UnmarshalJSON(data []byte) error { } case "is_write_index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/aliasesrecord.go b/typedapi/types/aliasesrecord.go index 73cf426c1e..130a6d6e44 100644 --- a/typedapi/types/aliasesrecord.go +++ b/typedapi/types/aliasesrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AliasesRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/aliases/types.ts#L22-L53 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/aliases/types.ts#L22-L53 type AliasesRecord struct { // Alias alias name Alias *string `json:"alias,omitempty"` diff --git a/typedapi/types/allfield.go b/typedapi/types/allfield.go index d7694ac1d3..e5a913f564 100644 --- a/typedapi/types/allfield.go +++ b/typedapi/types/allfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AllField type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/meta-fields.ts#L29-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/meta-fields.ts#L29-L40 type AllField struct { Analyzer string `json:"analyzer"` Enabled bool `json:"enabled"` @@ -73,7 +73,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { s.Analyzer = o case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { } case "omit_norms": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { s.Similarity = o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -139,7 +139,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { } case "store_term_vector_offsets": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -153,7 +153,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { } case "store_term_vector_payloads": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -167,7 +167,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { } case "store_term_vector_positions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -181,7 +181,7 @@ func (s *AllField) UnmarshalJSON(data []byte) error { } case "store_term_vectors": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/allocationdecision.go b/typedapi/types/allocationdecision.go index 2eaadefa7c..9e22b0e8a2 100644 --- a/typedapi/types/allocationdecision.go +++ b/typedapi/types/allocationdecision.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // AllocationDecision type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/types.ts#L26-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/types.ts#L26-L30 type AllocationDecision struct { Decider string `json:"decider"` Decision allocationexplaindecision.AllocationExplainDecision `json:"decision"` diff --git a/typedapi/types/allocationrecord.go b/typedapi/types/allocationrecord.go index f9ac112629..02c6c4125d 100644 --- a/typedapi/types/allocationrecord.go +++ b/typedapi/types/allocationrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,23 +31,23 @@ import ( // AllocationRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/allocation/types.ts#L24-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/allocation/types.ts#L24-L75 type AllocationRecord struct { // DiskAvail Free disk space available to Elasticsearch. // Elasticsearch retrieves this metric from the node’s operating system. // Disk-based shard allocation uses this metric to assign shards to nodes based // on available disk space. - DiskAvail ByteSize `json:"disk.avail,omitempty"` + DiskAvail *ByteSize `json:"disk.avail,omitempty"` // DiskIndices Disk space used by the node’s shards. Does not include disk space for the // translog or unassigned shards. // IMPORTANT: This metric double-counts disk space for hard-linked files, such // as those created when shrinking, splitting, or cloning an index. - DiskIndices ByteSize `json:"disk.indices,omitempty"` + DiskIndices *ByteSize `json:"disk.indices,omitempty"` // DiskPercent Total percentage of disk space in use. Calculated as `disk.used / // disk.total`. - DiskPercent Percentage `json:"disk.percent,omitempty"` + DiskPercent *Percentage `json:"disk.percent,omitempty"` // DiskTotal Total disk space for the node, including in-use and available space. - DiskTotal ByteSize `json:"disk.total,omitempty"` + DiskTotal *ByteSize `json:"disk.total,omitempty"` // DiskUsed Total disk space in use. // Elasticsearch retrieves this metric from the node’s operating system (OS). // The metric includes disk space for: Elasticsearch, including the translog and @@ -55,11 +55,11 @@ type AllocationRecord struct { // files on the node. // Unlike `disk.indices`, this metric does not double-count disk space for // hard-linked files. - DiskUsed ByteSize `json:"disk.used,omitempty"` + DiskUsed *ByteSize `json:"disk.used,omitempty"` // Host Network host for the node. Set using the `network.host` setting. - Host string `json:"host,omitempty"` + Host *string `json:"host,omitempty"` // Ip IP address and port for the node. - Ip string `json:"ip,omitempty"` + Ip *string `json:"ip,omitempty"` // Node Name for the node. Set using the `node.name` setting. Node *string `json:"node,omitempty"` // Shards Number of primary and replica shards assigned to the node. 
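// --- Editor's usage sketch (not part of the generated diff) ---
// The AllocationRecord hunk above turns the optional cat-allocation columns
// (disk.*, host, ip) into pointers, so "not reported" is now distinguishable
// from a zero value. A nil-safe consumer sketch; in practice the record value
// would come from the cat allocation endpoint rather than being built by hand.
package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func describe(rec types.AllocationRecord) string {
	node := "unassigned"
	if rec.Node != nil {
		node = *rec.Node
	}
	// ByteSize is itself a union (string or number), so print it as-is.
	disk := "n/a"
	if rec.DiskAvail != nil {
		disk = fmt.Sprintf("%v", *rec.DiskAvail)
	}
	return fmt.Sprintf("node=%s disk.avail=%s", node, disk)
}

func main() {
	host := "10.0.0.1"
	var rec types.AllocationRecord
	rec.Host = &host // fields left nil simply were not returned
	fmt.Println(describe(rec))
}
// --- end sketch ---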
diff --git a/typedapi/types/allocationstore.go b/typedapi/types/allocationstore.go index 9237bf0ae8..d6f8662648 100644 --- a/typedapi/types/allocationstore.go +++ b/typedapi/types/allocationstore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AllocationStore type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/types.ts#L39-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/types.ts#L39-L46 type AllocationStore struct { AllocationId string `json:"allocation_id"` Found bool `json:"found"` @@ -69,7 +69,7 @@ func (s *AllocationStore) UnmarshalJSON(data []byte) error { s.AllocationId = o case "found": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *AllocationStore) UnmarshalJSON(data []byte) error { } case "in_sync": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -97,7 +97,7 @@ func (s *AllocationStore) UnmarshalJSON(data []byte) error { } case "matching_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -112,7 +112,7 @@ func (s *AllocationStore) UnmarshalJSON(data []byte) error { } case "matching_sync_id": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/alwayscondition.go b/typedapi/types/alwayscondition.go index 69e3497ba0..d38b629cf6 100644 --- a/typedapi/types/alwayscondition.go +++ b/typedapi/types/alwayscondition.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // AlwaysCondition type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Conditions.ts#L25-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Conditions.ts#L25-L25 type AlwaysCondition struct { } diff --git a/typedapi/types/analysisconfig.go b/typedapi/types/analysisconfig.go index 9f0725dd06..7a3d718b49 100644 --- a/typedapi/types/analysisconfig.go +++ b/typedapi/types/analysisconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AnalysisConfig type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Analysis.ts#L29-L77 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Analysis.ts#L29-L77 type AnalysisConfig struct { // BucketSpan The size of the interval that the analysis is aggregated into, typically // between `5m` and `1h`. This value should be either a whole number of days or @@ -184,7 +184,7 @@ func (s *AnalysisConfig) UnmarshalJSON(data []byte) error { } case "multivariate_by_fields": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/analysisconfigread.go b/typedapi/types/analysisconfigread.go index 6dad8f0f76..d537a6052d 100644 --- a/typedapi/types/analysisconfigread.go +++ b/typedapi/types/analysisconfigread.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AnalysisConfigRead type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Analysis.ts#L79-L148 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Analysis.ts#L79-L148 type AnalysisConfigRead struct { // BucketSpan The size of the interval that the analysis is aggregated into, typically // between `5m` and `1h`. @@ -171,7 +171,7 @@ func (s *AnalysisConfigRead) UnmarshalJSON(data []byte) error { } case "multivariate_by_fields": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/analysislimits.go b/typedapi/types/analysislimits.go index 5ea3d8d86e..73281f4f53 100644 --- a/typedapi/types/analysislimits.go +++ b/typedapi/types/analysislimits.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AnalysisLimits type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Analysis.ts#L161-L172 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Analysis.ts#L161-L172 type AnalysisLimits struct { // CategorizationExamplesLimit The maximum number of examples stored per category in memory and in the // results data store. If you increase this value, more examples are available, @@ -74,7 +74,7 @@ func (s *AnalysisLimits) UnmarshalJSON(data []byte) error { switch t { case "categorization_examples_limit": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/analysismemorylimit.go b/typedapi/types/analysismemorylimit.go index 43c3ea3de3..0dbb5502a1 100644 --- a/typedapi/types/analysismemorylimit.go +++ b/typedapi/types/analysismemorylimit.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AnalysisMemoryLimit type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Analysis.ts#L174-L179 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Analysis.ts#L174-L179 type AnalysisMemoryLimit struct { // ModelMemoryLimit Limits can be applied for the resources required to hold the mathematical // models in memory. These limits are approximate and can be set per job. They diff --git a/typedapi/types/analytics.go b/typedapi/types/analytics.go index a540f3e9c5..c4dc5b51f5 100644 --- a/typedapi/types/analytics.go +++ b/typedapi/types/analytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Analytics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L330-L332 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L330-L332 type Analytics struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -54,7 +54,7 @@ func (s *Analytics) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *Analytics) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/analyticscollection.go b/typedapi/types/analyticscollection.go index b1153893ca..ce678aa863 100644 --- a/typedapi/types/analyticscollection.go +++ b/typedapi/types/analyticscollection.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // AnalyticsCollection type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/_types/BehavioralAnalytics.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/_types/BehavioralAnalytics.ts#L22-L27 type AnalyticsCollection struct { // EventDataStream Data stream for the collection. EventDataStream EventDataStream `json:"event_data_stream"` diff --git a/typedapi/types/analyticsstatistics.go b/typedapi/types/analyticsstatistics.go index dc27ff35d0..08b84fdcc6 100644 --- a/typedapi/types/analyticsstatistics.go +++ b/typedapi/types/analyticsstatistics.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AnalyticsStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L61-L71 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L61-L71 type AnalyticsStatistics struct { BoxplotUsage int64 `json:"boxplot_usage"` CumulativeCardinalityUsage int64 `json:"cumulative_cardinality_usage"` @@ -60,7 +60,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { switch t { case "boxplot_usage": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { } case "cumulative_cardinality_usage": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { } case "moving_percentiles_usage": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -105,7 +105,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { } case "multi_terms_usage": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -120,7 +120,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { } case "normalize_usage": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -135,7 +135,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { } case "rate_usage": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { } case "string_stats_usage": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -165,7 +165,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { } case "t_test_usage": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -180,7 +180,7 @@ func (s *AnalyticsStatistics) UnmarshalJSON(data []byte) error { } case "top_metrics_usage": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/analyzedetail.go b/typedapi/types/analyzedetail.go index db5aff230e..a9d6c29e14 100644 --- a/typedapi/types/analyzedetail.go +++ b/typedapi/types/analyzedetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AnalyzeDetail type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/analyze/types.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/analyze/types.ts#L24-L30 type AnalyzeDetail struct { Analyzer *AnalyzerDetail `json:"analyzer,omitempty"` Charfilters []CharFilterDetail `json:"charfilters,omitempty"` @@ -66,7 +66,7 @@ func (s *AnalyzeDetail) UnmarshalJSON(data []byte) error { } case "custom_analyzer": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/analyzer.go b/typedapi/types/analyzer.go index 729aaf4403..c344015dea 100644 --- a/typedapi/types/analyzer.go +++ b/typedapi/types/analyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -37,5 +37,5 @@ package types // SnowballAnalyzer // DutchAnalyzer // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L113-L131 -type Analyzer interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L113-L131 +type Analyzer any diff --git a/typedapi/types/analyzerdetail.go b/typedapi/types/analyzerdetail.go index 305bbc5a47..c5e89d322f 100644 --- a/typedapi/types/analyzerdetail.go +++ b/typedapi/types/analyzerdetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AnalyzerDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/analyze/types.ts#L32-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/analyze/types.ts#L32-L35 type AnalyzerDetail struct { Name string `json:"name"` Tokens []ExplainAnalyzeToken `json:"tokens"` diff --git a/typedapi/types/analyzetoken.go b/typedapi/types/analyzetoken.go index 237f3d4e0f..226abc090d 100644 --- a/typedapi/types/analyzetoken.go +++ b/typedapi/types/analyzetoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AnalyzeToken type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/analyze/types.ts#L37-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/analyze/types.ts#L37-L44 type AnalyzeToken struct { EndOffset int64 `json:"end_offset"` Position int64 `json:"position"` @@ -57,7 +57,7 @@ func (s *AnalyzeToken) UnmarshalJSON(data []byte) error { switch t { case "end_offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *AnalyzeToken) UnmarshalJSON(data []byte) error { } case "position": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *AnalyzeToken) UnmarshalJSON(data []byte) error { } case "positionLength": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -102,7 +102,7 @@ func (s *AnalyzeToken) UnmarshalJSON(data []byte) error { } case "start_offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/anomaly.go b/typedapi/types/anomaly.go index b11d91f81b..2a2867d642 100644 --- a/typedapi/types/anomaly.go +++ b/typedapi/types/anomaly.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Anomaly type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Anomaly.ts#L24-L121 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Anomaly.ts#L24-L121 type Anomaly struct { // Actual The actual value for the bucket. Actual []Float64 `json:"actual,omitempty"` @@ -173,7 +173,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { case "detector_index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -234,7 +234,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { } case "initial_record_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -250,7 +250,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { } case "is_interim": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -324,7 +324,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { s.PartitionFieldValue = &o case "probability": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -340,7 +340,7 @@ func (s *Anomaly) UnmarshalJSON(data []byte) error { } case "record_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/anomalycause.go b/typedapi/types/anomalycause.go index 7b29387934..d8eddfbe5e 100644 --- a/typedapi/types/anomalycause.go +++ b/typedapi/types/anomalycause.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AnomalyCause type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Anomaly.ts#L123-L138 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Anomaly.ts#L123-L138 type AnomalyCause struct { Actual []Float64 `json:"actual"` ByFieldName string `json:"by_field_name"` @@ -174,7 +174,7 @@ func (s *AnomalyCause) UnmarshalJSON(data []byte) error { s.PartitionFieldValue = o case "probability": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/anomalydetectors.go b/typedapi/types/anomalydetectors.go index 8da34752b5..c0f5f3db0a 100644 --- a/typedapi/types/anomalydetectors.go +++ b/typedapi/types/anomalydetectors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AnomalyDetectors type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/info/types.ts#L44-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/info/types.ts#L44-L50 type AnomalyDetectors struct { CategorizationAnalyzer CategorizationAnalyzer `json:"categorization_analyzer"` CategorizationExamplesLimit int `json:"categorization_examples_limit"` @@ -77,7 +77,7 @@ func (s *AnomalyDetectors) UnmarshalJSON(data []byte) error { case "categorization_examples_limit": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -93,7 +93,7 @@ func (s *AnomalyDetectors) UnmarshalJSON(data []byte) error { case "daily_model_snapshot_retention_after_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -121,7 +121,7 @@ func (s *AnomalyDetectors) UnmarshalJSON(data []byte) error { case "model_snapshot_retention_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/anomalyexplanation.go b/typedapi/types/anomalyexplanation.go index 3171b7d726..c3175dbbec 100644 --- a/typedapi/types/anomalyexplanation.go +++ b/typedapi/types/anomalyexplanation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AnomalyExplanation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Anomaly.ts#L156-L197 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Anomaly.ts#L156-L197 type AnomalyExplanation struct { // AnomalyCharacteristicsImpact Impact from the duration and magnitude of the detected anomaly relative to // the historical average. @@ -76,7 +76,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case "anomaly_characteristics_impact": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case "anomaly_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -119,7 +119,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { s.AnomalyType = &o case "high_variance_penalty": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -133,7 +133,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { } case "incomplete_bucket_penalty": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -147,7 +147,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { } case "lower_confidence_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -164,7 +164,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case "multi_bucket_impact": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -180,7 +180,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { case "single_bucket_impact": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -195,7 +195,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { } case "typical_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -211,7 +211,7 @@ func (s *AnomalyExplanation) UnmarshalJSON(data []byte) error { } case "upper_confidence_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/apikey.go b/typedapi/types/apikey.go index 689ea5de39..6e9f0120d9 100644 --- a/typedapi/types/apikey.go +++ b/typedapi/types/apikey.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ApiKey type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/ApiKey.ts#L27-L77 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/ApiKey.ts#L27-L89 type ApiKey struct { // Creation Creation time for the API key in milliseconds. Creation *int64 `json:"creation,omitempty"` @@ -52,8 +52,13 @@ type ApiKey struct { Metadata Metadata `json:"metadata,omitempty"` // Name Name of the API key. 
Name string `json:"name"` + // ProfileUid The profile uid for the API key owner principal, if requested and if it + // exists + ProfileUid *string `json:"profile_uid,omitempty"` // Realm Realm name of the principal for which this API key was created. Realm *string `json:"realm,omitempty"` + // RealmType Realm type of the principal for which this API key was created + RealmType *string `json:"realm_type,omitempty"` // RoleDescriptors The role descriptors assigned to this API key when it was created or last // updated. // An empty role descriptor means the API key inherits the owner user’s @@ -80,7 +85,7 @@ func (s *ApiKey) UnmarshalJSON(data []byte) error { switch t { case "creation": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +100,7 @@ func (s *ApiKey) UnmarshalJSON(data []byte) error { } case "expiration": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +120,7 @@ func (s *ApiKey) UnmarshalJSON(data []byte) error { } case "invalidated": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -143,6 +148,18 @@ func (s *ApiKey) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Name", err) } + case "profile_uid": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "ProfileUid", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.ProfileUid = &o + case "realm": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { @@ -155,6 +172,18 @@ func (s *ApiKey) UnmarshalJSON(data []byte) error { } s.Realm = &o + case "realm_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "RealmType", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.RealmType = &o + case "role_descriptors": if s.RoleDescriptors == nil { s.RoleDescriptors = make(map[string]RoleDescriptor, 0) diff --git a/typedapi/types/apikeyaggregate.go b/typedapi/types/apikeyaggregate.go index 3d01cbe6ed..114e20f24e 100644 --- a/typedapi/types/apikeyaggregate.go +++ b/typedapi/types/apikeyaggregate.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types -// APIKeyAggregate holds the union for the following types: +// ApiKeyAggregate holds the union for the following types: // // CardinalityAggregate // ValueCountAggregate @@ -36,5 +36,5 @@ package types // DateRangeAggregate // CompositeAggregate // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/query_api_keys/types.ts#L123-L140 -type APIKeyAggregate interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/query_api_keys/types.ts#L123-L140 +type ApiKeyAggregate any diff --git a/typedapi/types/apikeyaggregationcontainer.go b/typedapi/types/apikeyaggregationcontainer.go index 186cac791d..9ce2aea5ed 100644 --- a/typedapi/types/apikeyaggregationcontainer.go +++ b/typedapi/types/apikeyaggregationcontainer.go @@ -16,7 +16,7 @@ // under the License. 
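// A minimal standalone sketch (not part of the generated client) of the
// strconv.Unquote fallback used above when decoding the new ApiKey fields
// profile_uid and realm_type: the raw JSON token is unquoted when it is a
// JSON string and kept as-is otherwise, then stored through a pointer so an
// absent field stays nil. The helper name decodeOptionalString is
// hypothetical.
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

func decodeOptionalString(raw json.RawMessage) *string {
	o := string(raw[:])
	if unquoted, err := strconv.Unquote(o); err == nil {
		o = unquoted
	}
	return &o
}

func main() {
	var doc map[string]json.RawMessage
	_ = json.Unmarshal([]byte(`{"profile_uid":"u_123","realm_type":"native"}`), &doc)
	fmt.Println(*decodeOptionalString(doc["profile_uid"]), *decodeOptionalString(doc["realm_type"]))
}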
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -28,13 +28,13 @@ import ( "io" ) -// APIKeyAggregationContainer type. +// ApiKeyAggregationContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/query_api_keys/types.ts#L64-L121 -type APIKeyAggregationContainer struct { +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/query_api_keys/types.ts#L64-L121 +type ApiKeyAggregationContainer struct { // Aggregations Sub-aggregations for this aggregation. // Only applies to bucket aggregations. - Aggregations map[string]APIKeyAggregationContainer `json:"aggregations,omitempty"` + Aggregations map[string]ApiKeyAggregationContainer `json:"aggregations,omitempty"` // Cardinality A single-value metrics aggregation that calculates an approximate count of // distinct values. Cardinality *CardinalityAggregation `json:"cardinality,omitempty"` @@ -49,10 +49,10 @@ type APIKeyAggregationContainer struct { DateRange *DateRangeAggregation `json:"date_range,omitempty"` // Filter A single bucket aggregation that narrows the set of documents to those that // match a query. - Filter *APIKeyQueryContainer `json:"filter,omitempty"` + Filter *ApiKeyQueryContainer `json:"filter,omitempty"` // Filters A multi-bucket aggregation where each bucket contains the documents that // match a query. - Filters *APIKeyFiltersAggregation `json:"filters,omitempty"` + Filters *ApiKeyFiltersAggregation `json:"filters,omitempty"` Meta Metadata `json:"meta,omitempty"` Missing *MissingAggregation `json:"missing,omitempty"` // Range A multi-bucket value source based aggregation that enables the user to define @@ -66,7 +66,7 @@ type APIKeyAggregationContainer struct { ValueCount *ValueCountAggregation `json:"value_count,omitempty"` } -func (s *APIKeyAggregationContainer) UnmarshalJSON(data []byte) error { +func (s *ApiKeyAggregationContainer) UnmarshalJSON(data []byte) error { dec := json.NewDecoder(bytes.NewReader(data)) @@ -83,7 +83,7 @@ func (s *APIKeyAggregationContainer) UnmarshalJSON(data []byte) error { case "aggregations", "aggs": if s.Aggregations == nil { - s.Aggregations = make(map[string]APIKeyAggregationContainer, 0) + s.Aggregations = make(map[string]ApiKeyAggregationContainer, 0) } if err := dec.Decode(&s.Aggregations); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -144,10 +144,10 @@ func (s *APIKeyAggregationContainer) UnmarshalJSON(data []byte) error { return nil } -// NewAPIKeyAggregationContainer returns a APIKeyAggregationContainer. -func NewAPIKeyAggregationContainer() *APIKeyAggregationContainer { - r := &APIKeyAggregationContainer{ - Aggregations: make(map[string]APIKeyAggregationContainer, 0), +// NewApiKeyAggregationContainer returns a ApiKeyAggregationContainer. +func NewApiKeyAggregationContainer() *ApiKeyAggregationContainer { + r := &ApiKeyAggregationContainer{ + Aggregations: make(map[string]ApiKeyAggregationContainer, 0), } return r diff --git a/typedapi/types/apikeyauthorization.go b/typedapi/types/apikeyauthorization.go index b6255bd4ec..6fedcf31dc 100644 --- a/typedapi/types/apikeyauthorization.go +++ b/typedapi/types/apikeyauthorization.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ApiKeyAuthorization type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Authorization.ts#L20-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Authorization.ts#L20-L29 type ApiKeyAuthorization struct { // Id The identifier for the API key. Id string `json:"id"` diff --git a/typedapi/types/apikeyfiltersaggregation.go b/typedapi/types/apikeyfiltersaggregation.go index 6a225ac72b..9e0c855771 100644 --- a/typedapi/types/apikeyfiltersaggregation.go +++ b/typedapi/types/apikeyfiltersaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -29,17 +29,15 @@ import ( "strconv" ) -// APIKeyFiltersAggregation type. +// ApiKeyFiltersAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/query_api_keys/types.ts#L208-L228 -type APIKeyFiltersAggregation struct { +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/query_api_keys/types.ts#L208-L228 +type ApiKeyFiltersAggregation struct { // Filters Collection of queries from which to build buckets. - Filters BucketsAPIKeyQueryContainer `json:"filters,omitempty"` + Filters BucketsApiKeyQueryContainer `json:"filters,omitempty"` // Keyed By default, the named filters aggregation returns the buckets as an object. // Set to `false` to return the buckets as an array of objects. - Keyed *bool `json:"keyed,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Keyed *bool `json:"keyed,omitempty"` // OtherBucket Set to `true` to add a bucket to the response which will contain all // documents that do not match any of the given filters. 
OtherBucket *bool `json:"other_bucket,omitempty"` @@ -47,7 +45,7 @@ type APIKeyFiltersAggregation struct { OtherBucketKey *string `json:"other_bucket_key,omitempty"` } -func (s *APIKeyFiltersAggregation) UnmarshalJSON(data []byte) error { +func (s *ApiKeyFiltersAggregation) UnmarshalJSON(data []byte) error { dec := json.NewDecoder(bytes.NewReader(data)) @@ -70,13 +68,13 @@ func (s *APIKeyFiltersAggregation) UnmarshalJSON(data []byte) error { localDec := json.NewDecoder(source) switch rawMsg[0] { case '{': - o := make(map[string]APIKeyQueryContainer, 0) + o := make(map[string]ApiKeyQueryContainer, 0) if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Filters", err) } s.Filters = o case '[': - o := []APIKeyQueryContainer{} + o := []ApiKeyQueryContainer{} if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Filters", err) } @@ -84,7 +82,7 @@ func (s *APIKeyFiltersAggregation) UnmarshalJSON(data []byte) error { } case "keyed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -97,25 +95,8 @@ func (s *APIKeyFiltersAggregation) UnmarshalJSON(data []byte) error { s.Keyed = &v } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "other_bucket": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -145,9 +126,9 @@ func (s *APIKeyFiltersAggregation) UnmarshalJSON(data []byte) error { return nil } -// NewAPIKeyFiltersAggregation returns a APIKeyFiltersAggregation. -func NewAPIKeyFiltersAggregation() *APIKeyFiltersAggregation { - r := &APIKeyFiltersAggregation{} +// NewApiKeyFiltersAggregation returns a ApiKeyFiltersAggregation. +func NewApiKeyFiltersAggregation() *ApiKeyFiltersAggregation { + r := &ApiKeyFiltersAggregation{} return r } diff --git a/typedapi/types/apikeyquerycontainer.go b/typedapi/types/apikeyquerycontainer.go index f7c12f8154..d76dc3d9eb 100644 --- a/typedapi/types/apikeyquerycontainer.go +++ b/typedapi/types/apikeyquerycontainer.go @@ -16,14 +16,22 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types -// APIKeyQueryContainer type. +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" +) + +// ApiKeyQueryContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/query_api_keys/types.ts#L142-L206 -type APIKeyQueryContainer struct { +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/query_api_keys/types.ts#L142-L206 +type ApiKeyQueryContainer struct { // Bool matches documents matching boolean combinations of other queries. Bool *BoolQuery `json:"bool,omitempty"` // Exists Returns documents that contain an indexed value for a field. @@ -55,9 +63,99 @@ type APIKeyQueryContainer struct { Wildcard map[string]WildcardQuery `json:"wildcard,omitempty"` } -// NewAPIKeyQueryContainer returns a APIKeyQueryContainer. 
-func NewAPIKeyQueryContainer() *APIKeyQueryContainer { - r := &APIKeyQueryContainer{ +func (s *ApiKeyQueryContainer) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "bool": + if err := dec.Decode(&s.Bool); err != nil { + return fmt.Errorf("%s | %w", "Bool", err) + } + + case "exists": + if err := dec.Decode(&s.Exists); err != nil { + return fmt.Errorf("%s | %w", "Exists", err) + } + + case "ids": + if err := dec.Decode(&s.Ids); err != nil { + return fmt.Errorf("%s | %w", "Ids", err) + } + + case "match": + if s.Match == nil { + s.Match = make(map[string]MatchQuery, 0) + } + if err := dec.Decode(&s.Match); err != nil { + return fmt.Errorf("%s | %w", "Match", err) + } + + case "match_all": + if err := dec.Decode(&s.MatchAll); err != nil { + return fmt.Errorf("%s | %w", "MatchAll", err) + } + + case "prefix": + if s.Prefix == nil { + s.Prefix = make(map[string]PrefixQuery, 0) + } + if err := dec.Decode(&s.Prefix); err != nil { + return fmt.Errorf("%s | %w", "Prefix", err) + } + + case "range": + if s.Range == nil { + s.Range = make(map[string]RangeQuery, 0) + } + if err := dec.Decode(&s.Range); err != nil { + return fmt.Errorf("%s | %w", "Range", err) + } + + case "simple_query_string": + if err := dec.Decode(&s.SimpleQueryString); err != nil { + return fmt.Errorf("%s | %w", "SimpleQueryString", err) + } + + case "term": + if s.Term == nil { + s.Term = make(map[string]TermQuery, 0) + } + if err := dec.Decode(&s.Term); err != nil { + return fmt.Errorf("%s | %w", "Term", err) + } + + case "terms": + if err := dec.Decode(&s.Terms); err != nil { + return fmt.Errorf("%s | %w", "Terms", err) + } + + case "wildcard": + if s.Wildcard == nil { + s.Wildcard = make(map[string]WildcardQuery, 0) + } + if err := dec.Decode(&s.Wildcard); err != nil { + return fmt.Errorf("%s | %w", "Wildcard", err) + } + + } + } + return nil +} + +// NewApiKeyQueryContainer returns a ApiKeyQueryContainer. +func NewApiKeyQueryContainer() *ApiKeyQueryContainer { + r := &ApiKeyQueryContainer{ Match: make(map[string]MatchQuery, 0), Prefix: make(map[string]PrefixQuery, 0), Range: make(map[string]RangeQuery, 0), diff --git a/typedapi/types/appendprocessor.go b/typedapi/types/appendprocessor.go index 67dcc51ac1..bb7ee5cbcc 100644 --- a/typedapi/types/appendprocessor.go +++ b/typedapi/types/appendprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AppendProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L279-L294 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L279-L294 type AppendProcessor struct { // AllowDuplicates If `false`, the processor does not append values already present in the // field. 
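// A minimal standalone sketch (not part of the generated client) of the
// token-driven decoding loop that the new ApiKeyQueryContainer.UnmarshalJSON
// above follows: read one JSON token at a time, switch on the field name, and
// lazily allocate map-valued fields before decoding into them. The type and
// field names below (queryContainer, termQuery) are hypothetical stand-ins.
package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
)

type termQuery struct {
	Value string `json:"value"`
}

type queryContainer struct {
	Term map[string]termQuery `json:"term,omitempty"`
}

func (s *queryContainer) UnmarshalJSON(data []byte) error {
	dec := json.NewDecoder(bytes.NewReader(data))
	for {
		t, err := dec.Token()
		if err != nil {
			if errors.Is(err, io.EOF) {
				break
			}
			return err
		}
		switch t {
		case "term":
			if s.Term == nil {
				s.Term = make(map[string]termQuery, 0)
			}
			if err := dec.Decode(&s.Term); err != nil {
				return fmt.Errorf("%s | %w", "Term", err)
			}
		}
	}
	return nil
}

func main() {
	var q queryContainer
	if err := json.Unmarshal([]byte(`{"term":{"name":{"value":"my-key"}}}`), &q); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Printf("%+v\n", q)
}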
@@ -71,7 +71,7 @@ func (s *AppendProcessor) UnmarshalJSON(data []byte) error { switch t { case "allow_duplicates": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *AppendProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/applicationglobaluserprivileges.go b/typedapi/types/applicationglobaluserprivileges.go index 712272105d..95f5621cf0 100644 --- a/typedapi/types/applicationglobaluserprivileges.go +++ b/typedapi/types/applicationglobaluserprivileges.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ApplicationGlobalUserPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L193-L195 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L333-L335 type ApplicationGlobalUserPrivileges struct { Manage ManageUserPrivileges `json:"manage"` } diff --git a/typedapi/types/applicationprivileges.go b/typedapi/types/applicationprivileges.go index c949f8e9c3..7d16487dec 100644 --- a/typedapi/types/applicationprivileges.go +++ b/typedapi/types/applicationprivileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ApplicationPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L26-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L26-L39 type ApplicationPrivileges struct { // Application The name of the application to which this entry applies. Application string `json:"application"` diff --git a/typedapi/types/applicationprivilegescheck.go b/typedapi/types/applicationprivilegescheck.go index 0011ee70fc..9e4433a44f 100644 --- a/typedapi/types/applicationprivilegescheck.go +++ b/typedapi/types/applicationprivilegescheck.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ApplicationPrivilegesCheck type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/has_privileges/types.ts#L24-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/has_privileges/types.ts#L24-L31 type ApplicationPrivilegesCheck struct { // Application The name of the application. Application string `json:"application"` diff --git a/typedapi/types/applicationsprivileges.go b/typedapi/types/applicationsprivileges.go index 0e4d43d572..bcb7db7722 100644 --- a/typedapi/types/applicationsprivileges.go +++ b/typedapi/types/applicationsprivileges.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ApplicationsPrivileges type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/has_privileges/types.ts#L46-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/has_privileges/types.ts#L46-L46 type ApplicationsPrivileges map[string]ResourcePrivileges diff --git a/typedapi/types/archive.go b/typedapi/types/archive.go index d6faa09a7e..98074fb7b0 100644 --- a/typedapi/types/archive.go +++ b/typedapi/types/archive.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Archive type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L48-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L48-L50 type Archive struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -54,7 +54,7 @@ func (s *Archive) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *Archive) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *Archive) UnmarshalJSON(data []byte) error { } case "indices_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/arraycomparecondition.go b/typedapi/types/arraycomparecondition.go index 36fb12651e..65253bb74b 100644 --- a/typedapi/types/arraycomparecondition.go +++ b/typedapi/types/arraycomparecondition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // ArrayCompareCondition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Conditions.ts#L32-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Conditions.ts#L32-L39 type ArrayCompareCondition struct { ArrayCompareCondition map[conditionop.ConditionOp]ArrayCompareOpParams `json:"-"` Path string `json:"path"` @@ -93,7 +93,7 @@ func (s *ArrayCompareCondition) UnmarshalJSON(data []byte) error { func (s ArrayCompareCondition) MarshalJSON() ([]byte, error) { type opt ArrayCompareCondition // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/arraycompareopparams.go b/typedapi/types/arraycompareopparams.go index 060ddc3f96..ffca2a90b7 100644 --- a/typedapi/types/arraycompareopparams.go +++ b/typedapi/types/arraycompareopparams.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // ArrayCompareOpParams type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Conditions.ts#L27-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Conditions.ts#L27-L30 type ArrayCompareOpParams struct { Quantifier quantifier.Quantifier `json:"quantifier"` Value FieldValue `json:"value"` diff --git a/typedapi/types/arraypercentilesitem.go b/typedapi/types/arraypercentilesitem.go index 3758c96eaf..8f8d04c358 100644 --- a/typedapi/types/arraypercentilesitem.go +++ b/typedapi/types/arraypercentilesitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,11 +31,11 @@ import ( // ArrayPercentilesItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L160-L164 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L160-L164 type ArrayPercentilesItem struct { - Key string `json:"key"` - Value Float64 `json:"value,omitempty"` - ValueAsString *string `json:"value_as_string,omitempty"` + Key string `json:"key"` + Value *Float64 `json:"value,omitempty"` + ValueAsString *string `json:"value_as_string,omitempty"` } func (s *ArrayPercentilesItem) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/asciifoldingtokenfilter.go b/typedapi/types/asciifoldingtokenfilter.go index 74f8b70335..63cec6ed84 100644 --- a/typedapi/types/asciifoldingtokenfilter.go +++ b/typedapi/types/asciifoldingtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // AsciiFoldingTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L168-L171 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L170-L173 type AsciiFoldingTokenFilter struct { PreserveOriginal Stringifiedboolean `json:"preserve_original,omitempty"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/asyncsearch.go b/typedapi/types/asyncsearch.go index 2e77caa27e..121649b2f2 100644 --- a/typedapi/types/asyncsearch.go +++ b/typedapi/types/asyncsearch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // AsyncSearch type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/async_search/_types/AsyncSearch.ts#L30-L56 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/async_search/_types/AsyncSearch.ts#L30-L56 type AsyncSearch struct { // Aggregations Partial aggregations results, coming from the shards that have already // completed the execution of the query. 
@@ -530,7 +530,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -580,7 +580,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -590,7 +590,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -618,7 +618,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { } case "max_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -634,7 +634,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { } case "num_reduce_phases": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -712,7 +712,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -722,7 +722,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Suggest") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -732,7 +732,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { } case "terminated_early": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -746,7 +746,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { } case "timed_out": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -760,7 +760,7 @@ func (s *AsyncSearch) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/attachmentprocessor.go b/typedapi/types/attachmentprocessor.go index dc14bf2977..d43cca335d 100644 --- a/typedapi/types/attachmentprocessor.go +++ b/typedapi/types/attachmentprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AttachmentProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L296-L337 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L296-L337 type AttachmentProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
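// A minimal standalone sketch (not part of the generated client) of the
// typed-keys convention the aggregation decoding above relies on: when typed
// keys are requested, each aggregation in the response is keyed as
// "<type>#<name>" (for example "boxplot#load_time"), and the decoder switches
// on the type part, which is why the case label changes from "box_plot" to
// "boxplot". The helper name splitTypedKey is hypothetical.
package main

import (
	"fmt"
	"strings"
)

func splitTypedKey(key string) (aggType, aggName string) {
	elems := strings.SplitN(key, "#", 2)
	if len(elems) == 2 {
		return elems[0], elems[1]
	}
	return "", key
}

func main() {
	t, n := splitTypedKey("boxplot#load_time")
	fmt.Println(t, n) // boxplot load_time
}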
@@ -115,7 +115,7 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -129,7 +129,7 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -143,7 +143,7 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { } case "indexed_chars": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -173,7 +173,7 @@ func (s *AttachmentProcessor) UnmarshalJSON(data []byte) error { } case "remove_binary": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/audit.go b/typedapi/types/audit.go index 1383f32ccf..09f244c459 100644 --- a/typedapi/types/audit.go +++ b/typedapi/types/audit.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Audit type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L73-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L73-L75 type Audit struct { Enabled bool `json:"enabled"` Outputs []string `json:"outputs,omitempty"` @@ -53,7 +53,7 @@ func (s *Audit) UnmarshalJSON(data []byte) error { switch t { case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/authenticateduser.go b/typedapi/types/authenticateduser.go index 14df47670f..555b0c21f4 100644 --- a/typedapi/types/authenticateduser.go +++ b/typedapi/types/authenticateduser.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // AuthenticatedUser type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_token/types.ts#L40-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_token/types.ts#L40-L45 type AuthenticatedUser struct { AuthenticationProvider *AuthenticationProvider `json:"authentication_provider,omitempty"` AuthenticationRealm UserRealm `json:"authentication_realm"` AuthenticationType string `json:"authentication_type"` - Email string `json:"email,omitempty"` + Email *string `json:"email,omitempty"` Enabled bool `json:"enabled"` - FullName string `json:"full_name,omitempty"` + FullName *string `json:"full_name,omitempty"` LookupRealm UserRealm `json:"lookup_realm"` Metadata Metadata `json:"metadata"` ProfileUid *string `json:"profile_uid,omitempty"` @@ -93,10 +93,10 @@ func (s *AuthenticatedUser) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Email = o + s.Email = &o case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/authenticatetoken.go b/typedapi/types/authenticatetoken.go index 301a44e74b..bd74e8fcd3 100644 --- a/typedapi/types/authenticatetoken.go +++ b/typedapi/types/authenticatetoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AuthenticateToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/authenticate/types.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/authenticate/types.ts#L22-L29 type AuthenticateToken struct { Name string `json:"name"` Type *string `json:"type,omitempty"` diff --git a/typedapi/types/authenticationprovider.go b/typedapi/types/authenticationprovider.go index 380e6f1551..eb144e52db 100644 --- a/typedapi/types/authenticationprovider.go +++ b/typedapi/types/authenticationprovider.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AuthenticationProvider type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_token/types.ts#L35-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_token/types.ts#L35-L38 type AuthenticationProvider struct { Name string `json:"name"` Type string `json:"type"` diff --git a/typedapi/types/autodatehistogramaggregate.go b/typedapi/types/autodatehistogramaggregate.go index 73d4771081..c01ec3a573 100644 --- a/typedapi/types/autodatehistogramaggregate.go +++ b/typedapi/types/autodatehistogramaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
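// A minimal standalone sketch (not part of the generated client) showing the
// effect of the change above from string to *string for optional fields such
// as Email and FullName on AuthenticatedUser: with a pointer, an absent field
// stays nil and can be told apart from an explicit empty string, and
// omitempty only omits the nil case. The struct name user is hypothetical.
package main

import (
	"encoding/json"
	"fmt"
)

type user struct {
	Email *string `json:"email,omitempty"`
}

func main() {
	var absent user
	empty := ""
	present := user{Email: &empty}

	a, _ := json.Marshal(absent)
	p, _ := json.Marshal(present)
	fmt.Println(string(a)) // {}
	fmt.Println(string(p)) // {"email":""}
}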
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // AutoDateHistogramAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L356-L360 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L358-L362 type AutoDateHistogramAggregate struct { Buckets BucketsDateHistogramBucket `json:"buckets"` Interval string `json:"interval"` diff --git a/typedapi/types/autodatehistogramaggregation.go b/typedapi/types/autodatehistogramaggregation.go index 5aa24b9929..14e8176e64 100644 --- a/typedapi/types/autodatehistogramaggregation.go +++ b/typedapi/types/autodatehistogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // AutoDateHistogramAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L65-L100 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L67-L102 type AutoDateHistogramAggregation struct { // Buckets The target number of buckets. Buckets *int `json:"buckets,omitempty"` @@ -42,8 +42,7 @@ type AutoDateHistogramAggregation struct { // Format The date format used to format `key_as_string` in the response. // If no `format` is specified, the first date format specified in the field // mapping is used. - Format *string `json:"format,omitempty"` - Meta Metadata `json:"meta,omitempty"` + Format *string `json:"format,omitempty"` // MinimumInterval The minimum rounding interval. // This can make the collection process more efficient, as the aggregation will // not attempt to round at any interval lower than `minimum_interval`. @@ -51,7 +50,6 @@ type AutoDateHistogramAggregation struct { // Missing The value to apply to documents that do not have a value. // By default, documents without a value are ignored. Missing DateTime `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` // Offset Time zone specified as a ISO 8601 UTC offset. 
Offset *string `json:"offset,omitempty"` Params map[string]json.RawMessage `json:"params,omitempty"` @@ -77,7 +75,7 @@ func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { case "buckets": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,11 +106,6 @@ func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { } s.Format = &o - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "minimum_interval": if err := dec.Decode(&s.MinimumInterval); err != nil { return fmt.Errorf("%s | %w", "MinimumInterval", err) @@ -123,18 +116,6 @@ func (s *AutoDateHistogramAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Missing", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "offset": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { diff --git a/typedapi/types/autofollowedcluster.go b/typedapi/types/autofollowedcluster.go index 2ebdd963b2..b58c3049db 100644 --- a/typedapi/types/autofollowedcluster.go +++ b/typedapi/types/autofollowedcluster.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // AutoFollowedCluster type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/stats/types.ts.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/stats/types.ts.ts#L27-L31 type AutoFollowedCluster struct { ClusterName string `json:"cluster_name"` LastSeenMetadataVersion int64 `json:"last_seen_metadata_version"` diff --git a/typedapi/types/autofollowpattern.go b/typedapi/types/autofollowpattern.go index f29ba86067..b6998ec385 100644 --- a/typedapi/types/autofollowpattern.go +++ b/typedapi/types/autofollowpattern.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // AutoFollowPattern type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/get_auto_follow_pattern/types.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/get_auto_follow_pattern/types.ts#L23-L26 type AutoFollowPattern struct { Name string `json:"name"` Pattern AutoFollowPatternSummary `json:"pattern"` diff --git a/typedapi/types/autofollowpatternsummary.go b/typedapi/types/autofollowpatternsummary.go index 855074a1bf..f7aa59ddc0 100644 --- a/typedapi/types/autofollowpatternsummary.go +++ b/typedapi/types/autofollowpatternsummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AutoFollowPatternSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/get_auto_follow_pattern/types.ts#L28-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/get_auto_follow_pattern/types.ts#L28-L52 type AutoFollowPatternSummary struct { Active bool `json:"active"` // FollowIndexPattern The name of follower index. @@ -64,7 +64,7 @@ func (s *AutoFollowPatternSummary) UnmarshalJSON(data []byte) error { switch t { case "active": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -94,7 +94,7 @@ func (s *AutoFollowPatternSummary) UnmarshalJSON(data []byte) error { case "max_outstanding_read_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/autofollowstats.go b/typedapi/types/autofollowstats.go index 5740eaada9..5f09917ca5 100644 --- a/typedapi/types/autofollowstats.go +++ b/typedapi/types/autofollowstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AutoFollowStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/stats/types.ts.ts#L33-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/stats/types.ts.ts#L33-L39 type AutoFollowStats struct { AutoFollowedClusters []AutoFollowedCluster `json:"auto_followed_clusters"` NumberOfFailedFollowIndices int64 `json:"number_of_failed_follow_indices"` @@ -61,7 +61,7 @@ func (s *AutoFollowStats) UnmarshalJSON(data []byte) error { } case "number_of_failed_follow_indices": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *AutoFollowStats) UnmarshalJSON(data []byte) error { } case "number_of_failed_remote_cluster_state_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *AutoFollowStats) UnmarshalJSON(data []byte) error { } case "number_of_successful_follow_indices": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/autoscalingcapacity.go b/typedapi/types/autoscalingcapacity.go index 1ef951d2b4..31351bddbd 100644 --- a/typedapi/types/autoscalingcapacity.go +++ b/typedapi/types/autoscalingcapacity.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // AutoscalingCapacity type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L38-L41 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L38-L41 type AutoscalingCapacity struct { Node AutoscalingResources `json:"node"` Total AutoscalingResources `json:"total"` diff --git a/typedapi/types/autoscalingdecider.go b/typedapi/types/autoscalingdecider.go index c218ff72db..c574322931 100644 --- a/typedapi/types/autoscalingdecider.go +++ b/typedapi/types/autoscalingdecider.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AutoscalingDecider type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L52-L56 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L52-L56 type AutoscalingDecider struct { ReasonDetails json.RawMessage `json:"reason_details,omitempty"` ReasonSummary *string `json:"reason_summary,omitempty"` diff --git a/typedapi/types/autoscalingdeciders.go b/typedapi/types/autoscalingdeciders.go index 1cab8b65c0..514cc19a30 100644 --- a/typedapi/types/autoscalingdeciders.go +++ b/typedapi/types/autoscalingdeciders.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // AutoscalingDeciders type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L31-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L31-L36 type AutoscalingDeciders struct { CurrentCapacity AutoscalingCapacity `json:"current_capacity"` CurrentNodes []AutoscalingNode `json:"current_nodes"` diff --git a/typedapi/types/autoscalingnode.go b/typedapi/types/autoscalingnode.go index 05f48c61ac..e9a9f308a5 100644 --- a/typedapi/types/autoscalingnode.go +++ b/typedapi/types/autoscalingnode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // AutoscalingNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L48-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L48-L50 type AutoscalingNode struct { Name string `json:"name"` } diff --git a/typedapi/types/autoscalingpolicy.go b/typedapi/types/autoscalingpolicy.go index 866bdb08a3..91c1b88ba3 100644 --- a/typedapi/types/autoscalingpolicy.go +++ b/typedapi/types/autoscalingpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // AutoscalingPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/autoscaling/_types/AutoscalingPolicy.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/autoscaling/_types/AutoscalingPolicy.ts#L23-L27 type AutoscalingPolicy struct { // Deciders Decider settings Deciders map[string]json.RawMessage `json:"deciders"` diff --git a/typedapi/types/autoscalingresources.go b/typedapi/types/autoscalingresources.go index d8788f3a0a..07d677cf0c 100644 --- a/typedapi/types/autoscalingresources.go +++ b/typedapi/types/autoscalingresources.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AutoscalingResources type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L43-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/autoscaling/get_autoscaling_capacity/GetAutoscalingCapacityResponse.ts#L43-L46 type AutoscalingResources struct { Memory int `json:"memory"` Storage int `json:"storage"` @@ -54,7 +54,7 @@ func (s *AutoscalingResources) UnmarshalJSON(data []byte) error { case "memory": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *AutoscalingResources) UnmarshalJSON(data []byte) error { case "storage": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/averageaggregation.go b/typedapi/types/averageaggregation.go index 65fef2dda3..826904f6d3 100644 --- a/typedapi/types/averageaggregation.go +++ b/typedapi/types/averageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L55-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L55-L55 type AverageAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` diff --git a/typedapi/types/averagebucketaggregation.go b/typedapi/types/averagebucketaggregation.go index f88f76b566..c38bace4c6 100644 --- a/typedapi/types/averagebucketaggregation.go +++ b/typedapi/types/averagebucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // AverageBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L78-L78 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L78-L78 type AverageBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type AverageBucketAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` } func (s *AverageBucketAggregation) UnmarshalJSON(data []byte) error { @@ -84,23 +82,6 @@ func (s *AverageBucketAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/avgaggregate.go b/typedapi/types/avgaggregate.go index d62edf3dcb..f572cc78b2 100644 --- a/typedapi/types/avgaggregate.go +++ b/typedapi/types/avgaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // AvgAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L209-L210 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L209-L210 type AvgAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. - Value Float64 `json:"value,omitempty"` - ValueAsString *string `json:"value_as_string,omitempty"` + Value *Float64 `json:"value,omitempty"` + ValueAsString *string `json:"value_as_string,omitempty"` } func (s *AvgAggregate) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/azurerepository.go b/typedapi/types/azurerepository.go index a508c736e5..57d62cec78 100644 --- a/typedapi/types/azurerepository.go +++ b/typedapi/types/azurerepository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // AzureRepository type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L40-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L40-L43 type AzureRepository struct { Settings AzureRepositorySettings `json:"settings"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/azurerepositorysettings.go b/typedapi/types/azurerepositorysettings.go index 7b0b7299b3..15ac74ba74 100644 --- a/typedapi/types/azurerepositorysettings.go +++ b/typedapi/types/azurerepositorysettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // AzureRepositorySettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L77-L83 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L77-L83 type AzureRepositorySettings struct { BasePath *string `json:"base_path,omitempty"` ChunkSize ByteSize `json:"chunk_size,omitempty"` @@ -89,7 +89,7 @@ func (s *AzureRepositorySettings) UnmarshalJSON(data []byte) error { s.Client = &o case "compress": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -137,7 +137,7 @@ func (s *AzureRepositorySettings) UnmarshalJSON(data []byte) error { } case "readonly": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/base.go b/typedapi/types/base.go index dc0c451eb7..55f453c3dc 100644 --- a/typedapi/types/base.go +++ b/typedapi/types/base.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Base type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L30-L33 type Base struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -53,7 +53,7 @@ func (s *Base) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -67,7 +67,7 @@ func (s *Base) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/baseindicator.go b/typedapi/types/baseindicator.go index 0443604e34..a4b5912808 100644 --- a/typedapi/types/baseindicator.go +++ b/typedapi/types/baseindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // BaseIndicator type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L42-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L42-L47 type BaseIndicator struct { Diagnosis []Diagnosis `json:"diagnosis,omitempty"` Impacts []Impact `json:"impacts,omitempty"` diff --git a/typedapi/types/basenode.go b/typedapi/types/basenode.go index d9140ff794..6494ece110 100644 --- a/typedapi/types/basenode.go +++ b/typedapi/types/basenode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // BaseNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_spec_utils/BaseNode.ts#L25-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_spec_utils/BaseNode.ts#L25-L32 type BaseNode struct { Attributes map[string]string `json:"attributes"` Host string `json:"host"` diff --git a/typedapi/types/binaryproperty.go b/typedapi/types/binaryproperty.go index 601430ba5c..a22d8231b5 100644 --- a/typedapi/types/binaryproperty.go +++ b/typedapi/types/binaryproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // BinaryProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L49-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L55-L57 type BinaryProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -80,7 +80,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -105,7 +105,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -126,7 +126,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -216,12 +216,6 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -240,6 +234,18 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -396,6 +402,12 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -407,7 +419,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -436,7 +448,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -457,7 +469,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -547,12 +559,6 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case 
"sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -571,6 +577,18 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -727,6 +745,12 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -749,7 +773,7 @@ func (s *BinaryProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/booleanproperty.go b/typedapi/types/booleanproperty.go index d540a65b57..364e962129 100644 --- a/typedapi/types/booleanproperty.go +++ b/typedapi/types/booleanproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // BooleanProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L53-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L59-L65 type BooleanProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -68,7 +68,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -130,7 +130,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -151,7 +151,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -241,12 +241,6 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -265,6 +259,18 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -421,6 +427,12 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -432,7 +444,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -447,7 +459,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -469,7 +481,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { } case "null_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -489,7 +501,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -510,7 +522,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) 
error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -600,12 +612,6 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -624,6 +630,18 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -780,6 +798,12 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -802,7 +826,7 @@ func (s *BooleanProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/boolquery.go b/typedapi/types/boolquery.go index 712b3e97dd..07d8f3d481 100644 --- a/typedapi/types/boolquery.go +++ b/typedapi/types/boolquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // BoolQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L28-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L29-L53 type BoolQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -72,7 +72,7 @@ func (s *BoolQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/boostingquery.go b/typedapi/types/boostingquery.go index 9e9e9f7b8c..15eaf2ca12 100644 --- a/typedapi/types/boostingquery.go +++ b/typedapi/types/boostingquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // BoostingQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L54-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L55-L68 type BoostingQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -65,7 +65,7 @@ func (s *BoostingQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -86,7 +86,7 @@ func (s *BoostingQuery) UnmarshalJSON(data []byte) error { } case "negative_boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/boxplotaggregate.go b/typedapi/types/boxplotaggregate.go index 6de0ece053..f731677bbb 100644 --- a/typedapi/types/boxplotaggregate.go +++ b/typedapi/types/boxplotaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // BoxPlotAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L706-L722 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L713-L729 type BoxPlotAggregate struct { Lower Float64 `json:"lower"` LowerAsString *string `json:"lower_as_string,omitempty"` @@ -66,7 +66,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { switch t { case "lower": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -94,7 +94,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { s.LowerAsString = &o case "max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -127,7 +127,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { } case "min": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -155,7 +155,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { s.MinAsString = &o case "q1": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -183,7 +183,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { s.Q1AsString = &o case "q2": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -211,7 +211,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { s.Q2AsString = &o case "q3": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -239,7 +239,7 @@ func (s *BoxPlotAggregate) UnmarshalJSON(data []byte) error { s.Q3AsString = &o case "upper": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/boxplotaggregation.go b/typedapi/types/boxplotaggregation.go index 068624a123..9008ea1b6a 100644 --- a/typedapi/types/boxplotaggregation.go +++ b/typedapi/types/boxplotaggregation.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // BoxplotAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L57-L62 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L57-L62 type BoxplotAggregation struct { // Compression Limits the maximum number of nodes used by the underlying TDigest algorithm // to `20 * compression`, enabling control of memory usage and approximation @@ -61,7 +61,7 @@ func (s *BoxplotAggregation) UnmarshalJSON(data []byte) error { switch t { case "compression": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/breaker.go b/typedapi/types/breaker.go index bde434391a..fe7f553bad 100644 --- a/typedapi/types/breaker.go +++ b/typedapi/types/breaker.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Breaker type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L434-L459 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L434-L459 type Breaker struct { // EstimatedSize Estimated memory used for the operation. EstimatedSize *string `json:"estimated_size,omitempty"` @@ -77,7 +77,7 @@ func (s *Breaker) UnmarshalJSON(data []byte) error { s.EstimatedSize = &o case "estimated_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *Breaker) UnmarshalJSON(data []byte) error { s.LimitSize = &o case "limit_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -119,7 +119,7 @@ func (s *Breaker) UnmarshalJSON(data []byte) error { } case "overhead": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -135,7 +135,7 @@ func (s *Breaker) UnmarshalJSON(data []byte) error { } case "tripped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/bucketcorrelationaggregation.go b/typedapi/types/bucketcorrelationaggregation.go index 94ea584f8e..d2fbd21bb0 100644 --- a/typedapi/types/bucketcorrelationaggregation.go +++ b/typedapi/types/bucketcorrelationaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,19 +26,16 @@ import ( "errors" "fmt" "io" - "strconv" ) // BucketCorrelationAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L129-L135 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L129-L135 type BucketCorrelationAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` // Function The correlation function to execute. Function BucketCorrelationFunction `json:"function"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` } func (s *BucketCorrelationAggregation) UnmarshalJSON(data []byte) error { @@ -66,23 +63,6 @@ func (s *BucketCorrelationAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Function", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/bucketcorrelationfunction.go b/typedapi/types/bucketcorrelationfunction.go index d827bf1c7c..8f7a533a0b 100644 --- a/typedapi/types/bucketcorrelationfunction.go +++ b/typedapi/types/bucketcorrelationfunction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // BucketCorrelationFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L137-L142 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L137-L142 type BucketCorrelationFunction struct { // CountCorrelation The configuration to calculate a count correlation. This function is designed // for determining the correlation of a term value and a given metric. diff --git a/typedapi/types/bucketcorrelationfunctioncountcorrelation.go b/typedapi/types/bucketcorrelationfunctioncountcorrelation.go index 190938a8ec..a0fc03dbba 100644 --- a/typedapi/types/bucketcorrelationfunctioncountcorrelation.go +++ b/typedapi/types/bucketcorrelationfunctioncountcorrelation.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // BucketCorrelationFunctionCountCorrelation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L144-L147 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L144-L147 type BucketCorrelationFunctionCountCorrelation struct { // Indicator The indicator with which to correlate the configured `bucket_path` values. Indicator BucketCorrelationFunctionCountCorrelationIndicator `json:"indicator"` diff --git a/typedapi/types/bucketcorrelationfunctioncountcorrelationindicator.go b/typedapi/types/bucketcorrelationfunctioncountcorrelationindicator.go index 55c2a50383..d4a7cff175 100644 --- a/typedapi/types/bucketcorrelationfunctioncountcorrelationindicator.go +++ b/typedapi/types/bucketcorrelationfunctioncountcorrelationindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // BucketCorrelationFunctionCountCorrelationIndicator type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L149-L167 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L149-L167 type BucketCorrelationFunctionCountCorrelationIndicator struct { // DocCount The total number of documents that initially created the expectations. It’s // required to be greater @@ -69,7 +69,7 @@ func (s *BucketCorrelationFunctionCountCorrelationIndicator) UnmarshalJSON(data case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/bucketinfluencer.go b/typedapi/types/bucketinfluencer.go index e6aab43d29..9c308c69df 100644 --- a/typedapi/types/bucketinfluencer.go +++ b/typedapi/types/bucketinfluencer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // BucketInfluencer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Bucket.ts#L80-L128 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Bucket.ts#L80-L128 type BucketInfluencer struct { // AnomalyScore A normalized score between 0-100, which is calculated for each bucket // influencer. 
This score might be updated as @@ -83,7 +83,7 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { switch t { case "anomaly_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -109,7 +109,7 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { } case "initial_anomaly_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { } case "is_interim": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -144,7 +144,7 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { } case "probability": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -160,7 +160,7 @@ func (s *BucketInfluencer) UnmarshalJSON(data []byte) error { } case "raw_anomaly_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/bucketksaggregation.go b/typedapi/types/bucketksaggregation.go index 01fbdee27e..1a4deea902 100644 --- a/typedapi/types/bucketksaggregation.go +++ b/typedapi/types/bucketksaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // BucketKsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L94-L127 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L94-L127 type BucketKsAggregation struct { // Alternative A list of string values indicating which K-S test alternative to calculate. // The valid values @@ -52,8 +52,6 @@ type BucketKsAggregation struct { // one used equal percentiles of a // metric to define the bucket end points. Fractions []Float64 `json:"fractions,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // SamplingMethod Indicates the sampling methodology when calculating the K-S test. Note, this // is sampling of the returned values. // This determines the cumulative distribution function (CDF) points used @@ -94,23 +92,6 @@ func (s *BucketKsAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Fractions", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "sampling_method": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { diff --git a/typedapi/types/bucketmetricvalueaggregate.go b/typedapi/types/bucketmetricvalueaggregate.go index 92ca5fae73..daaa921c2d 100644 --- a/typedapi/types/bucketmetricvalueaggregate.go +++ b/typedapi/types/bucketmetricvalueaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,15 +31,15 @@ import ( // BucketMetricValueAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L233-L236 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L233-L236 type BucketMetricValueAggregate struct { Keys []string `json:"keys"` Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. - Value Float64 `json:"value,omitempty"` - ValueAsString *string `json:"value_as_string,omitempty"` + Value *Float64 `json:"value,omitempty"` + ValueAsString *string `json:"value_as_string,omitempty"` } func (s *BucketMetricValueAggregate) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/bucketpathaggregation.go b/typedapi/types/bucketpathaggregation.go index eccb389522..b84b940305 100644 --- a/typedapi/types/bucketpathaggregation.go +++ b/typedapi/types/bucketpathaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,17 +26,14 @@ import ( "errors" "fmt" "io" - "strconv" ) // BucketPathAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L31-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L31-L37 type BucketPathAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` } func (s *BucketPathAggregation) UnmarshalJSON(data []byte) error { @@ -59,23 +56,6 @@ func (s *BucketPathAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "BucketsPath", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/bucketsadjacencymatrixbucket.go b/typedapi/types/bucketsadjacencymatrixbucket.go index ae85d814e3..d6c97ca577 100644 --- a/typedapi/types/bucketsadjacencymatrixbucket.go +++ b/typedapi/types/bucketsadjacencymatrixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]AdjacencyMatrixBucket // []AdjacencyMatrixBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsAdjacencyMatrixBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsAdjacencyMatrixBucket any diff --git a/typedapi/types/bucketsapikeyquerycontainer.go b/typedapi/types/bucketsapikeyquerycontainer.go index 887d64274e..8e4246880f 100644 --- a/typedapi/types/bucketsapikeyquerycontainer.go +++ b/typedapi/types/bucketsapikeyquerycontainer.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types -// BucketsAPIKeyQueryContainer holds the union for the following types: +// BucketsApiKeyQueryContainer holds the union for the following types: // -// map[string]APIKeyQueryContainer -// []APIKeyQueryContainer +// map[string]ApiKeyQueryContainer +// []ApiKeyQueryContainer // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsAPIKeyQueryContainer interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsApiKeyQueryContainer any diff --git a/typedapi/types/bucketscompositebucket.go b/typedapi/types/bucketscompositebucket.go index 77d330dcf1..5648ffe0d5 100644 --- a/typedapi/types/bucketscompositebucket.go +++ b/typedapi/types/bucketscompositebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]CompositeBucket // []CompositeBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsCompositeBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsCompositeBucket any diff --git a/typedapi/types/bucketscriptaggregation.go b/typedapi/types/bucketscriptaggregation.go index 844c072b49..146fe32c3d 100644 --- a/typedapi/types/bucketscriptaggregation.go +++ b/typedapi/types/bucketscriptaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // BucketScriptAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L80-L85 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L80-L85 type BucketScriptAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type BucketScriptAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // Script The script to run for this aggregation. Script Script `json:"script,omitempty"` } @@ -86,23 +84,6 @@ func (s *BucketScriptAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { diff --git a/typedapi/types/bucketsdatehistogrambucket.go b/typedapi/types/bucketsdatehistogrambucket.go index 7976ad5ad9..931a469ebf 100644 --- a/typedapi/types/bucketsdatehistogrambucket.go +++ b/typedapi/types/bucketsdatehistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]DateHistogramBucket // []DateHistogramBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsDateHistogramBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsDateHistogramBucket any diff --git a/typedapi/types/bucketsdoubletermsbucket.go b/typedapi/types/bucketsdoubletermsbucket.go index 0c99e6a1da..08f3939a94 100644 --- a/typedapi/types/bucketsdoubletermsbucket.go +++ b/typedapi/types/bucketsdoubletermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]DoubleTermsBucket // []DoubleTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsDoubleTermsBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsDoubleTermsBucket any diff --git a/typedapi/types/bucketselectoraggregation.go b/typedapi/types/bucketselectoraggregation.go index 2db79fa751..3065de3606 100644 --- a/typedapi/types/bucketselectoraggregation.go +++ b/typedapi/types/bucketselectoraggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // BucketSelectorAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L87-L92 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L87-L92 type BucketSelectorAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type BucketSelectorAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // Script The script to run for this aggregation. Script Script `json:"script,omitempty"` } @@ -86,23 +84,6 @@ func (s *BucketSelectorAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { diff --git a/typedapi/types/bucketsfiltersbucket.go b/typedapi/types/bucketsfiltersbucket.go index 307c714def..a56f5bf5ed 100644 --- a/typedapi/types/bucketsfiltersbucket.go +++ b/typedapi/types/bucketsfiltersbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
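Not part of the generated patch: the Buckets* unions above (BucketsAdjacencyMatrixBucket, BucketsDateHistogramBucket, BucketsDoubleTermsBucket, and so on) are now declared as any rather than interface{}, and still carry either the keyed-map form or the array form listed in their doc comments. A hedged sketch of how a caller might branch on the two forms; the countBuckets helper is ours, not part of the client.

package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// countBuckets type-switches between the two documented shapes of the union:
// a map keyed by bucket key, or a plain slice of buckets.
func countBuckets(b types.BucketsDoubleTermsBucket) int {
	switch v := b.(type) {
	case map[string]types.DoubleTermsBucket:
		return len(v)
	case []types.DoubleTermsBucket:
		return len(v)
	default:
		return 0
	}
}

func main() {
	var buckets types.BucketsDoubleTermsBucket = []types.DoubleTermsBucket{{}, {}}
	fmt.Println("buckets:", countBuckets(buckets))
}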
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]FiltersBucket // []FiltersBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsFiltersBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsFiltersBucket any diff --git a/typedapi/types/bucketsfrequentitemsetsbucket.go b/typedapi/types/bucketsfrequentitemsetsbucket.go index 9e2b5e993c..e8cf954b11 100644 --- a/typedapi/types/bucketsfrequentitemsetsbucket.go +++ b/typedapi/types/bucketsfrequentitemsetsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]FrequentItemSetsBucket // []FrequentItemSetsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsFrequentItemSetsBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsFrequentItemSetsBucket any diff --git a/typedapi/types/bucketsgeohashgridbucket.go b/typedapi/types/bucketsgeohashgridbucket.go index 9a5c48fb97..d378aecc0b 100644 --- a/typedapi/types/bucketsgeohashgridbucket.go +++ b/typedapi/types/bucketsgeohashgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]GeoHashGridBucket // []GeoHashGridBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsGeoHashGridBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsGeoHashGridBucket any diff --git a/typedapi/types/bucketsgeohexgridbucket.go b/typedapi/types/bucketsgeohexgridbucket.go index d432b66b39..5cfba2f8b1 100644 --- a/typedapi/types/bucketsgeohexgridbucket.go +++ b/typedapi/types/bucketsgeohexgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]GeoHexGridBucket // []GeoHexGridBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsGeoHexGridBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsGeoHexGridBucket any diff --git a/typedapi/types/bucketsgeotilegridbucket.go b/typedapi/types/bucketsgeotilegridbucket.go index 8242a094ad..547962158f 100644 --- a/typedapi/types/bucketsgeotilegridbucket.go +++ b/typedapi/types/bucketsgeotilegridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]GeoTileGridBucket // []GeoTileGridBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsGeoTileGridBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsGeoTileGridBucket any diff --git a/typedapi/types/bucketshistogrambucket.go b/typedapi/types/bucketshistogrambucket.go index b750202935..df6ac2371a 100644 --- a/typedapi/types/bucketshistogrambucket.go +++ b/typedapi/types/bucketshistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]HistogramBucket // []HistogramBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsHistogramBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsHistogramBucket any diff --git a/typedapi/types/bucketsipprefixbucket.go b/typedapi/types/bucketsipprefixbucket.go index 30328604de..b4367e5ef6 100644 --- a/typedapi/types/bucketsipprefixbucket.go +++ b/typedapi/types/bucketsipprefixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]IpPrefixBucket // []IpPrefixBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsIpPrefixBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsIpPrefixBucket any diff --git a/typedapi/types/bucketsiprangebucket.go b/typedapi/types/bucketsiprangebucket.go index ebcd28af23..c9994bc42c 100644 --- a/typedapi/types/bucketsiprangebucket.go +++ b/typedapi/types/bucketsiprangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]IpRangeBucket // []IpRangeBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsIpRangeBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsIpRangeBucket any diff --git a/typedapi/types/bucketslongraretermsbucket.go b/typedapi/types/bucketslongraretermsbucket.go index b150a39d6c..16bfcbfe7f 100644 --- a/typedapi/types/bucketslongraretermsbucket.go +++ b/typedapi/types/bucketslongraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]LongRareTermsBucket // []LongRareTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsLongRareTermsBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsLongRareTermsBucket any diff --git a/typedapi/types/bucketslongtermsbucket.go b/typedapi/types/bucketslongtermsbucket.go index a0bb08f45a..06784e15ca 100644 --- a/typedapi/types/bucketslongtermsbucket.go +++ b/typedapi/types/bucketslongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]LongTermsBucket // []LongTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsLongTermsBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsLongTermsBucket any diff --git a/typedapi/types/bucketsmultitermsbucket.go b/typedapi/types/bucketsmultitermsbucket.go index daf1499679..a2f4f25456 100644 --- a/typedapi/types/bucketsmultitermsbucket.go +++ b/typedapi/types/bucketsmultitermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]MultiTermsBucket // []MultiTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsMultiTermsBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsMultiTermsBucket any diff --git a/typedapi/types/bucketsortaggregation.go b/typedapi/types/bucketsortaggregation.go index 7719bd6ae9..ae777d9587 100644 --- a/typedapi/types/bucketsortaggregation.go +++ b/typedapi/types/bucketsortaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,14 +33,12 @@ import ( // BucketSortAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L169-L190 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L169-L190 type BucketSortAggregation struct { // From Buckets in positions prior to `from` will be truncated. From *int `json:"from,omitempty"` // GapPolicy The policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // Size The number of buckets to return. // Defaults to all buckets of the parent aggregation. 
Size *int `json:"size,omitempty"` @@ -65,7 +63,7 @@ func (s *BucketSortAggregation) UnmarshalJSON(data []byte) error { case "from": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,26 +82,9 @@ func (s *BucketSortAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/bucketspath.go b/typedapi/types/bucketspath.go index bb73db0bc5..ea361bdb79 100644 --- a/typedapi/types/bucketspath.go +++ b/typedapi/types/bucketspath.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ package types // []string // map[string]string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L53-L59 -type BucketsPath interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L53-L59 +type BucketsPath any diff --git a/typedapi/types/bucketsquery.go b/typedapi/types/bucketsquery.go index 8d25ad5876..2a912bcce6 100644 --- a/typedapi/types/bucketsquery.go +++ b/typedapi/types/bucketsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]Query // []Query // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsQuery interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsQuery any diff --git a/typedapi/types/bucketsrangebucket.go b/typedapi/types/bucketsrangebucket.go index 1b13de6230..20a3283b6b 100644 --- a/typedapi/types/bucketsrangebucket.go +++ b/typedapi/types/bucketsrangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
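Not part of the generated patch: BucketsPath, defined just above as an untyped union of string, []string and map[string]string, is the type that pipeline aggregations such as BucketPathAggregation, BucketScriptAggregation and BucketSelectorAggregation take in their BucketsPath field. A small sketch of assigning each documented form; the path names are made up for illustration.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Each documented member of the union can be assigned directly; the map
	// form gives every input path a name that a script can refer to.
	paths := []types.BucketsPath{
		"sales_total",
		[]string{"sales_total", "_count"},
		map[string]string{"total": "sales_total", "count": "_count"},
	}

	for _, p := range paths {
		out, _ := json.Marshal(p)
		fmt.Printf("%T -> %s\n", p, out)
	}
}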
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]RangeBucket // []RangeBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsRangeBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsRangeBucket any diff --git a/typedapi/types/bucketssignificantlongtermsbucket.go b/typedapi/types/bucketssignificantlongtermsbucket.go index 55e17edda4..b72330c426 100644 --- a/typedapi/types/bucketssignificantlongtermsbucket.go +++ b/typedapi/types/bucketssignificantlongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]SignificantLongTermsBucket // []SignificantLongTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsSignificantLongTermsBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsSignificantLongTermsBucket any diff --git a/typedapi/types/bucketssignificantstringtermsbucket.go b/typedapi/types/bucketssignificantstringtermsbucket.go index 4d3aaf9e3d..462b22169e 100644 --- a/typedapi/types/bucketssignificantstringtermsbucket.go +++ b/typedapi/types/bucketssignificantstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]SignificantStringTermsBucket // []SignificantStringTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsSignificantStringTermsBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsSignificantStringTermsBucket any diff --git a/typedapi/types/bucketsstringraretermsbucket.go b/typedapi/types/bucketsstringraretermsbucket.go index 24cd81c1f3..e5e396162d 100644 --- a/typedapi/types/bucketsstringraretermsbucket.go +++ b/typedapi/types/bucketsstringraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]StringRareTermsBucket // []StringRareTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsStringRareTermsBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsStringRareTermsBucket any diff --git a/typedapi/types/bucketsstringtermsbucket.go b/typedapi/types/bucketsstringtermsbucket.go index 715a1d58df..fc091f314f 100644 --- a/typedapi/types/bucketsstringtermsbucket.go +++ b/typedapi/types/bucketsstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]StringTermsBucket // []StringTermsBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsStringTermsBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsStringTermsBucket any diff --git a/typedapi/types/bucketsummary.go b/typedapi/types/bucketsummary.go index e00e532be3..616beecebe 100644 --- a/typedapi/types/bucketsummary.go +++ b/typedapi/types/bucketsummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // BucketSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Bucket.ts#L31-L78 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Bucket.ts#L31-L78 type BucketSummary struct { // AnomalyScore The maximum anomaly score, between 0-100, for any of the bucket influencers. 
// This is an overall, rate-limited @@ -85,7 +85,7 @@ func (s *BucketSummary) UnmarshalJSON(data []byte) error { switch t { case "anomaly_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,7 +111,7 @@ func (s *BucketSummary) UnmarshalJSON(data []byte) error { } case "event_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -126,7 +126,7 @@ func (s *BucketSummary) UnmarshalJSON(data []byte) error { } case "initial_anomaly_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -142,7 +142,7 @@ func (s *BucketSummary) UnmarshalJSON(data []byte) error { } case "is_interim": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/bucketsvariablewidthhistogrambucket.go b/typedapi/types/bucketsvariablewidthhistogrambucket.go index bb7bcccc10..6dd954d54e 100644 --- a/typedapi/types/bucketsvariablewidthhistogrambucket.go +++ b/typedapi/types/bucketsvariablewidthhistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // map[string]VariableWidthHistogramBucket // []VariableWidthHistogramBucket // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsVariableWidthHistogramBucket interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsVariableWidthHistogramBucket any diff --git a/typedapi/types/bucketsvoid.go b/typedapi/types/bucketsvoid.go index 608737b503..59f22e449c 100644 --- a/typedapi/types/bucketsvoid.go +++ b/typedapi/types/bucketsvoid.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // BucketsVoid holds the union for the following types: // -// map[string]interface{} -// []interface{} +// map[string]any +// []any // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L316-L325 -type BucketsVoid interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L316-L325 +type BucketsVoid any diff --git a/typedapi/types/buildinformation.go b/typedapi/types/buildinformation.go index 2115b5918e..90164e207a 100644 --- a/typedapi/types/buildinformation.go +++ b/typedapi/types/buildinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
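Not part of the generated patch: two recurring edits in the hunks above are worth noting. First, var tmp interface{} becoming var tmp any is purely cosmetic, since any has been a language-level alias for interface{} since Go 1.18. Second, the switch surrounding those temporaries is the generator's lenient numeric decoding, which accepts a field either as a JSON number or as a quoted string. A simplified, standalone version of that pattern (names are ours, not the library's):

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// parseLenientInt mirrors the shape of the generated decoders above: decode the
// raw value into an any, then accept either a JSON number or a quoted string.
func parseLenientInt(raw json.RawMessage) (int64, error) {
	var tmp any
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return 0, err
	}
	switch v := tmp.(type) {
	case string:
		return strconv.ParseInt(v, 10, 64)
	case float64:
		return int64(v), nil
	default:
		return 0, fmt.Errorf("unexpected JSON type %T", tmp)
	}
}

func main() {
	for _, raw := range []string{`42`, `"42"`} {
		n, err := parseLenientInt(json.RawMessage(raw))
		fmt.Println(n, err)
	}
}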
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // BuildInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/info/types.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/info/types.ts#L24-L27 type BuildInformation struct { Date DateTime `json:"date"` Hash string `json:"hash"` diff --git a/typedapi/types/bulkindexbyscrollfailure.go b/typedapi/types/bulkindexbyscrollfailure.go index 7ca81615f5..68d48bd9cf 100644 --- a/typedapi/types/bulkindexbyscrollfailure.go +++ b/typedapi/types/bulkindexbyscrollfailure.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // BulkIndexByScrollFailure type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Errors.ts#L58-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Errors.ts#L60-L66 type BulkIndexByScrollFailure struct { Cause ErrorCause `json:"cause"` Id string `json:"id"` @@ -72,7 +72,7 @@ func (s *BulkIndexByScrollFailure) UnmarshalJSON(data []byte) error { case "status": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/bulkstats.go b/typedapi/types/bulkstats.go index a0504e5afa..4c1fb4b141 100644 --- a/typedapi/types/bulkstats.go +++ b/typedapi/types/bulkstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // BulkStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L68-L78 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L68-L78 type BulkStats struct { AvgSize ByteSize `json:"avg_size,omitempty"` AvgSizeInBytes int64 `json:"avg_size_in_bytes"` @@ -65,7 +65,7 @@ func (s *BulkStats) UnmarshalJSON(data []byte) error { } case "avg_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *BulkStats) UnmarshalJSON(data []byte) error { } case "total_operations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +110,7 @@ func (s *BulkStats) UnmarshalJSON(data []byte) error { } case "total_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/bytenumberproperty.go b/typedapi/types/bytenumberproperty.go index 5a766eddc3..a3396b9f75 100644 --- a/typedapi/types/bytenumberproperty.go +++ b/typedapi/types/bytenumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // ByteNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L164-L167 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L172-L175 type ByteNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -79,7 +79,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -171,7 +171,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -261,12 +261,6 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -285,6 +279,18 @@ func 
(s *ByteNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -441,6 +447,12 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -452,7 +464,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -467,7 +479,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -481,7 +493,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +531,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -540,7 +552,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -630,12 +642,6 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -654,6 +660,18 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -810,6 +828,12 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -868,7 +892,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -882,7 +906,7 @@ func (s *ByteNumberProperty) UnmarshalJSON(data []byte) error { } case "time_series_dimension": - var tmp 
interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/bytesize.go b/typedapi/types/bytesize.go index 8f7d08e4e5..69b929ada8 100644 --- a/typedapi/types/bytesize.go +++ b/typedapi/types/bytesize.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // int64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L96-L97 -type ByteSize interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L96-L97 +type ByteSize any diff --git a/typedapi/types/bytesprocessor.go b/typedapi/types/bytesprocessor.go index 5c3da7c4e3..5112ae158f 100644 --- a/typedapi/types/bytesprocessor.go +++ b/typedapi/types/bytesprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // BytesProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L392-L408 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L392-L408 type BytesProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -100,7 +100,7 @@ func (s *BytesProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *BytesProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/cachequeries.go b/typedapi/types/cachequeries.go index 85aa0f8a1d..3f89bbfb9f 100644 --- a/typedapi/types/cachequeries.go +++ b/typedapi/types/cachequeries.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CacheQueries type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L405-L407 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L407-L409 type CacheQueries struct { Enabled bool `json:"enabled"` } @@ -52,7 +52,7 @@ func (s *CacheQueries) UnmarshalJSON(data []byte) error { switch t { case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/cachestats.go b/typedapi/types/cachestats.go index f736e21fba..e07b6047fd 100644 --- a/typedapi/types/cachestats.go +++ b/typedapi/types/cachestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CacheStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/stats/types.ts#L37-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/stats/types.ts#L37-L43 type CacheStats struct { Count int `json:"count"` Evictions int `json:"evictions"` @@ -57,7 +57,7 @@ func (s *CacheStats) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -73,7 +73,7 @@ func (s *CacheStats) UnmarshalJSON(data []byte) error { case "evictions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *CacheStats) UnmarshalJSON(data []byte) error { case "hits": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -105,7 +105,7 @@ func (s *CacheStats) UnmarshalJSON(data []byte) error { case "misses": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/calendar.go b/typedapi/types/calendar.go index ee63a9be45..09b407d252 100644 --- a/typedapi/types/calendar.go +++ b/typedapi/types/calendar.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Calendar type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_calendars/types.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_calendars/types.ts#L22-L29 type Calendar struct { // CalendarId A string that uniquely identifies a calendar. CalendarId string `json:"calendar_id"` diff --git a/typedapi/types/calendarevent.go b/typedapi/types/calendarevent.go index d71ec4dba0..92d2e45644 100644 --- a/typedapi/types/calendarevent.go +++ b/typedapi/types/calendarevent.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
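Not part of the generated patch: the ByteNumberProperty hunks a few files above extend the property-kind dispatch used when decoding the fields and properties maps, adding semantic_text and icu_collation_keyword cases alongside the repositioned sparse_vector case. In miniature, that dispatch peeks at the "type" key of the raw mapping and uses it to pick the concrete property type to decode into; the sketch below only stands in for that switch and is not the generated code itself.

package main

import (
	"encoding/json"
	"fmt"
)

// decodePropertyKind peeks at the "type" key the same way the generated
// UnmarshalJSON bodies do, then reports which concrete property type the
// generator would decode the raw message into (only two kinds shown here).
func decodePropertyKind(raw json.RawMessage) (string, error) {
	kind := make(map[string]any)
	if err := json.Unmarshal(raw, &kind); err != nil {
		return "", err
	}
	switch kind["type"] {
	case "semantic_text":
		return "SemanticTextProperty", nil
	case "icu_collation_keyword":
		return "IcuCollationProperty", nil
	default:
		return "generic Property fallback", nil
	}
}

func main() {
	out, _ := decodePropertyKind(json.RawMessage(`{"type":"semantic_text"}`))
	fmt.Println("decode into:", out)
}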
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CalendarEvent type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/CalendarEvent.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/CalendarEvent.ts#L23-L33 type CalendarEvent struct { // CalendarId A string that uniquely identifies a calendar. CalendarId *string `json:"calendar_id,omitempty"` diff --git a/typedapi/types/cardinalityaggregate.go b/typedapi/types/cardinalityaggregate.go index d79298388a..b817a92e02 100644 --- a/typedapi/types/cardinalityaggregate.go +++ b/typedapi/types/cardinalityaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CardinalityAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L138-L141 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L138-L141 type CardinalityAggregate struct { Meta Metadata `json:"meta,omitempty"` Value int64 `json:"value"` @@ -58,7 +58,7 @@ func (s *CardinalityAggregate) UnmarshalJSON(data []byte) error { } case "value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/cardinalityaggregation.go b/typedapi/types/cardinalityaggregation.go index fa3a8306e0..342c4ba0e6 100644 --- a/typedapi/types/cardinalityaggregation.go +++ b/typedapi/types/cardinalityaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // CardinalityAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L87-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L87-L99 type CardinalityAggregation struct { // ExecutionHint Mechanism by which cardinality aggregations is run. 
ExecutionHint *cardinalityexecutionmode.CardinalityExecutionMode `json:"execution_hint,omitempty"` @@ -81,7 +81,7 @@ func (s *CardinalityAggregation) UnmarshalJSON(data []byte) error { case "precision_threshold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *CardinalityAggregation) UnmarshalJSON(data []byte) error { } case "rehash": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/catanonalydetectorcolumns.go b/typedapi/types/catanonalydetectorcolumns.go index a4837a52c3..b115fd050b 100644 --- a/typedapi/types/catanonalydetectorcolumns.go +++ b/typedapi/types/catanonalydetectorcolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ import ( // CatAnonalyDetectorColumns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/_types/CatBase.ts#L402-L404 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/_types/CatBase.ts#L402-L404 type CatAnonalyDetectorColumns []catanomalydetectorcolumn.CatAnomalyDetectorColumn diff --git a/typedapi/types/catcomponenttemplate.go b/typedapi/types/catcomponenttemplate.go index 27385bde69..74ebbe9a11 100644 --- a/typedapi/types/catcomponenttemplate.go +++ b/typedapi/types/catcomponenttemplate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CatComponentTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/component_templates/types.ts#L20-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/component_templates/types.ts#L20-L28 type CatComponentTemplate struct { AliasCount string `json:"alias_count"` IncludedIn string `json:"included_in"` diff --git a/typedapi/types/catdatafeedcolumns.go b/typedapi/types/catdatafeedcolumns.go index 8db6166f29..de9e8fd6c2 100644 --- a/typedapi/types/catdatafeedcolumns.go +++ b/typedapi/types/catdatafeedcolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ import ( // CatDatafeedColumns type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/_types/CatBase.ts#L559-L559 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/_types/CatBase.ts#L559-L559 type CatDatafeedColumns []catdatafeedcolumn.CatDatafeedColumn diff --git a/typedapi/types/catdfacolumns.go b/typedapi/types/catdfacolumns.go index 759484b2b7..3e3884b2ab 100644 --- a/typedapi/types/catdfacolumns.go +++ b/typedapi/types/catdfacolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ import ( // CatDfaColumns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/_types/CatBase.ts#L558-L558 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/_types/CatBase.ts#L558-L558 type CatDfaColumns []catdfacolumn.CatDfaColumn diff --git a/typedapi/types/categorizationanalyzer.go b/typedapi/types/categorizationanalyzer.go index b0bdd7c475..1b7429811a 100644 --- a/typedapi/types/categorizationanalyzer.go +++ b/typedapi/types/categorizationanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // CategorizationAnalyzerDefinition // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Analysis.ts#L181-L182 -type CategorizationAnalyzer interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Analysis.ts#L181-L182 +type CategorizationAnalyzer any diff --git a/typedapi/types/categorizationanalyzerdefinition.go b/typedapi/types/categorizationanalyzerdefinition.go index 401fd0de8c..21a0579957 100644 --- a/typedapi/types/categorizationanalyzerdefinition.go +++ b/typedapi/types/categorizationanalyzerdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // CategorizationAnalyzerDefinition type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Analysis.ts#L184-L197 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Analysis.ts#L184-L197 type CategorizationAnalyzerDefinition struct { // CharFilter One or more character filters. In addition to the built-in character filters, // other plugins can provide more character filters. 
If this property is not @@ -123,7 +123,7 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { } s.CharFilter = append(s.CharFilter, *o) default: - o := new(interface{}) + o := new(any) if err := localDec.Decode(&o); err != nil { return err } @@ -131,7 +131,7 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { } default: source := bytes.NewReader(rawMsg) - o := new(interface{}) + o := new(any) if err := json.NewDecoder(source).Decode(&o); err != nil { return fmt.Errorf("%s | %w", "CharFilter", err) } @@ -439,7 +439,7 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { } s.Filter = append(s.Filter, *o) default: - o := new(interface{}) + o := new(any) if err := localDec.Decode(&o); err != nil { return err } @@ -447,7 +447,7 @@ func (s *CategorizationAnalyzerDefinition) UnmarshalJSON(data []byte) error { } default: source := bytes.NewReader(rawMsg) - o := new(interface{}) + o := new(any) if err := json.NewDecoder(source).Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Filter", err) } diff --git a/typedapi/types/categorizetextaggregation.go b/typedapi/types/categorizetextaggregation.go index f9cad38a33..192e1c1536 100644 --- a/typedapi/types/categorizetextaggregation.go +++ b/typedapi/types/categorizetextaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CategorizeTextAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L1037-L1101 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L1045-L1109 type CategorizeTextAggregation struct { // CategorizationAnalyzer The categorization analyzer specifies how the text is analyzed and tokenized // before being categorized. @@ -68,11 +68,9 @@ type CategorizeTextAggregation struct { // Smaller values use less memory and create fewer categories. Larger values // will use more memory and // create narrower categories. Max allowed value is 100. - MaxUniqueTokens *int `json:"max_unique_tokens,omitempty"` - Meta Metadata `json:"meta,omitempty"` + MaxUniqueTokens *int `json:"max_unique_tokens,omitempty"` // MinDocCount The minimum number of documents in a bucket to be returned to the results. - MinDocCount *int `json:"min_doc_count,omitempty"` - Name *string `json:"name,omitempty"` + MinDocCount *int `json:"min_doc_count,omitempty"` // ShardMinDocCount The minimum number of documents in a bucket to be returned from the shard // before merging. 
ShardMinDocCount *int `json:"shard_min_doc_count,omitempty"` @@ -136,7 +134,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case "max_matched_tokens": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -152,7 +150,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case "max_unique_tokens": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -166,14 +164,9 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { s.MaxUniqueTokens = &f } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -187,21 +180,9 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { s.MinDocCount = &f } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "shard_min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -217,7 +198,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -233,7 +214,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case "similarity_threshold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -249,7 +230,7 @@ func (s *CategorizeTextAggregation) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/categorizetextanalyzer.go b/typedapi/types/categorizetextanalyzer.go index 78d08afde3..997d2968c3 100644 --- a/typedapi/types/categorizetextanalyzer.go +++ b/typedapi/types/categorizetextanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // CustomCategorizeTextAnalyzer // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L1103-L1106 -type CategorizeTextAnalyzer interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L1111-L1114 +type CategorizeTextAnalyzer any diff --git a/typedapi/types/category.go b/typedapi/types/category.go index 194f569a5d..4c461694d9 100644 --- a/typedapi/types/category.go +++ b/typedapi/types/category.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Category type. 
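// Illustrative sketch, not part of the generated diff: the union-style
// aliases touched above (CategorizationAnalyzer, CategorizeTextAnalyzer)
// now read `any` instead of the equivalent `interface{}`. Callers keep
// assigning either union member directly; only the spelling of the empty
// interface changes. Package path and usage below are assumptions for
// illustration only.
package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Either a plain analyzer name (string) or a structured definition
	// such as types.CustomCategorizeTextAnalyzer satisfies the alias.
	var analyzer types.CategorizeTextAnalyzer = "standard"
	fmt.Printf("%T %v\n", analyzer, analyzer)
}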
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Category.ts#L23-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Category.ts#L23-L49 type Category struct { // CategoryId A unique identifier for the category. category_id is unique at the job level, // even when per-partition categorization is enabled. @@ -137,7 +137,7 @@ func (s *Category) UnmarshalJSON(data []byte) error { s.Mlcategory = o case "num_matches": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/cattrainedmodelscolumns.go b/typedapi/types/cattrainedmodelscolumns.go index 8ef5e4c625..adb1318229 100644 --- a/typedapi/types/cattrainedmodelscolumns.go +++ b/typedapi/types/cattrainedmodelscolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ import ( // CatTrainedModelsColumns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/_types/CatBase.ts#L636-L638 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/_types/CatBase.ts#L636-L638 type CatTrainedModelsColumns []cattrainedmodelscolumn.CatTrainedModelsColumn diff --git a/typedapi/types/cattransformcolumns.go b/typedapi/types/cattransformcolumns.go index 56f846eaf9..f80d4b24cd 100644 --- a/typedapi/types/cattransformcolumns.go +++ b/typedapi/types/cattransformcolumns.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ import ( // CatTransformColumns type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/_types/CatBase.ts#L845-L845 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/_types/CatBase.ts#L845-L845 type CatTransformColumns []cattransformcolumn.CatTransformColumn diff --git a/typedapi/types/ccr.go b/typedapi/types/ccr.go index 1a7290ea1e..dc8844c704 100644 --- a/typedapi/types/ccr.go +++ b/typedapi/types/ccr.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Ccr type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L334-L337 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L334-L337 type Ccr struct { AutoFollowPatternsCount int `json:"auto_follow_patterns_count"` Available bool `json:"available"` @@ -56,7 +56,7 @@ func (s *Ccr) UnmarshalJSON(data []byte) error { case "auto_follow_patterns_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *Ccr) UnmarshalJSON(data []byte) error { } case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *Ccr) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *Ccr) UnmarshalJSON(data []byte) error { case "follower_indices_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/ccrshardstats.go b/typedapi/types/ccrshardstats.go index ea9b7a01eb..766263b093 100644 --- a/typedapi/types/ccrshardstats.go +++ b/typedapi/types/ccrshardstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CcrShardStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/_types/FollowIndexStats.ts#L35-L69 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/_types/FollowIndexStats.ts#L35-L69 type CcrShardStats struct { BytesRead int64 `json:"bytes_read"` FailedReadRequests int64 `json:"failed_read_requests"` @@ -84,7 +84,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { switch t { case "bytes_read": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,7 +99,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { } case "failed_read_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { } case "failed_write_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -139,7 +139,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { } case "follower_global_checkpoint": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -186,7 +186,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { } case "leader_global_checkpoint": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -218,7 +218,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { } case "operations_read": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -233,7 +233,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { } case "operations_written": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := 
tmp.(type) { case string: @@ -249,7 +249,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case "outstanding_read_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -265,7 +265,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case "outstanding_write_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -298,7 +298,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { case "shard_id": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -313,7 +313,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { } case "successful_read_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -328,7 +328,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { } case "successful_write_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -383,7 +383,7 @@ func (s *CcrShardStats) UnmarshalJSON(data []byte) error { } case "write_buffer_operation_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/certificateinformation.go b/typedapi/types/certificateinformation.go index e438099f5d..2943e4c7b7 100644 --- a/typedapi/types/certificateinformation.go +++ b/typedapi/types/certificateinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,9 +31,9 @@ import ( // CertificateInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ssl/certificates/types.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ssl/certificates/types.ts#L22-L31 type CertificateInformation struct { - Alias string `json:"alias,omitempty"` + Alias *string `json:"alias,omitempty"` Expiry DateTime `json:"expiry"` Format string `json:"format"` HasPrivateKey bool `json:"has_private_key"` @@ -68,7 +68,7 @@ func (s *CertificateInformation) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Alias = o + s.Alias = &o case "expiry": if err := dec.Decode(&s.Expiry); err != nil { @@ -88,7 +88,7 @@ func (s *CertificateInformation) UnmarshalJSON(data []byte) error { s.Format = o case "has_private_key": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/cgroup.go b/typedapi/types/cgroup.go index ce84368e51..90b48339ea 100644 --- a/typedapi/types/cgroup.go +++ b/typedapi/types/cgroup.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Cgroup type. 
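// Illustrative sketch, not part of the generated diff: in the
// certificateinformation.go change above, CertificateInformation.Alias
// moved from string to *string, so the field can now be absent rather than
// empty. Consumers reading an SSL certificates response would nil-check it;
// the struct literal below merely stands in for a decoded API response.
package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	ci := types.CertificateInformation{Format: "PEM"}
	if ci.Alias != nil {
		fmt.Println("alias:", *ci.Alias)
	} else {
		fmt.Println("certificate has no alias")
	}
}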
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L461-L474 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L461-L474 type Cgroup struct { // Cpu Contains statistics about `cpu` control group for the node. Cpu *CgroupCpu `json:"cpu,omitempty"` diff --git a/typedapi/types/cgroupcpu.go b/typedapi/types/cgroupcpu.go index 26fa708091..686427d41c 100644 --- a/typedapi/types/cgroupcpu.go +++ b/typedapi/types/cgroupcpu.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CgroupCpu type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L487-L504 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L487-L504 type CgroupCpu struct { // CfsPeriodMicros The period of time, in microseconds, for how regularly all tasks in the same // cgroup as the Elasticsearch process should have their access to CPU resources @@ -64,7 +64,7 @@ func (s *CgroupCpu) UnmarshalJSON(data []byte) error { case "cfs_period_micros": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *CgroupCpu) UnmarshalJSON(data []byte) error { case "cfs_quota_micros": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/cgroupcpustat.go b/typedapi/types/cgroupcpustat.go index 2e0bac2fdb..e628046852 100644 --- a/typedapi/types/cgroupcpustat.go +++ b/typedapi/types/cgroupcpustat.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CgroupCpuStat type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L506-L519 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L506-L519 type CgroupCpuStat struct { // NumberOfElapsedPeriods The number of reporting periods (as specified by `cfs_period_micros`) that // have elapsed. @@ -60,7 +60,7 @@ func (s *CgroupCpuStat) UnmarshalJSON(data []byte) error { switch t { case "number_of_elapsed_periods": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *CgroupCpuStat) UnmarshalJSON(data []byte) error { } case "number_of_times_throttled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/cgroupmemory.go b/typedapi/types/cgroupmemory.go index dfe257659c..c5aaa80943 100644 --- a/typedapi/types/cgroupmemory.go +++ b/typedapi/types/cgroupmemory.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CgroupMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L521-L537 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L521-L537 type CgroupMemory struct { // ControlGroup The `memory` control group to which the Elasticsearch process belongs. ControlGroup *string `json:"control_group,omitempty"` diff --git a/typedapi/types/chaininput.go b/typedapi/types/chaininput.go index 48fcef7607..795a97d160 100644 --- a/typedapi/types/chaininput.go +++ b/typedapi/types/chaininput.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ChainInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L35-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L35-L37 type ChainInput struct { Inputs []map[string]WatcherInput `json:"inputs"` } diff --git a/typedapi/types/charfilter.go b/typedapi/types/charfilter.go index 1b3f02dec8..6b4ef1b920 100644 --- a/typedapi/types/charfilter.go +++ b/typedapi/types/charfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // CharFilterDefinition // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/char_filters.ts#L28-L30 -type CharFilter interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/char_filters.ts#L28-L30 +type CharFilter any diff --git a/typedapi/types/charfilterdefinition.go b/typedapi/types/charfilterdefinition.go index a6ce6962d9..47d37d0939 100644 --- a/typedapi/types/charfilterdefinition.go +++ b/typedapi/types/charfilterdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -28,5 +28,5 @@ package types // IcuNormalizationCharFilter // KuromojiIterationMarkCharFilter // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/char_filters.ts#L32-L41 -type CharFilterDefinition interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/char_filters.ts#L32-L41 +type CharFilterDefinition any diff --git a/typedapi/types/charfilterdetail.go b/typedapi/types/charfilterdetail.go index 64e835de4c..5d43f74678 100644 --- a/typedapi/types/charfilterdetail.go +++ b/typedapi/types/charfilterdetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CharFilterDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/analyze/types.ts#L46-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/analyze/types.ts#L46-L49 type CharFilterDetail struct { FilteredText []string `json:"filtered_text"` Name string `json:"name"` diff --git a/typedapi/types/charfiltertypes.go b/typedapi/types/charfiltertypes.go index df40b3f3d6..98b1bcf930 100644 --- a/typedapi/types/charfiltertypes.go +++ b/typedapi/types/charfiltertypes.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // CharFilterTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L228-L261 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L228-L261 type CharFilterTypes struct { // AnalyzerTypes Contains statistics about analyzer types used in selected nodes. AnalyzerTypes []FieldTypes `json:"analyzer_types"` diff --git a/typedapi/types/chargrouptokenizer.go b/typedapi/types/chargrouptokenizer.go index 1ee9fe607c..a7c04a4d25 100644 --- a/typedapi/types/chargrouptokenizer.go +++ b/typedapi/types/chargrouptokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CharGroupTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L56-L60 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L56-L60 type CharGroupTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` TokenizeOnChars []string `json:"tokenize_on_chars"` @@ -56,7 +56,7 @@ func (s *CharGroupTokenizer) UnmarshalJSON(data []byte) error { case "max_token_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/checkpointing.go b/typedapi/types/checkpointing.go index 376cd141af..8afb1a028c 100644 --- a/typedapi/types/checkpointing.go +++ b/typedapi/types/checkpointing.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Checkpointing type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/get_transform_stats/types.ts#L85-L92 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/get_transform_stats/types.ts#L85-L92 type Checkpointing struct { ChangesLastDetectedAt *int64 `json:"changes_last_detected_at,omitempty"` ChangesLastDetectedAtDateTime DateTime `json:"changes_last_detected_at_date_time,omitempty"` @@ -57,7 +57,7 @@ func (s *Checkpointing) UnmarshalJSON(data []byte) error { switch t { case "changes_last_detected_at": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *Checkpointing) UnmarshalJSON(data []byte) error { } case "last_search_time": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -102,7 +102,7 @@ func (s *Checkpointing) UnmarshalJSON(data []byte) error { } case "operations_behind": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/checkpointstats.go b/typedapi/types/checkpointstats.go index ea804db85e..f8785467af 100644 --- a/typedapi/types/checkpointstats.go +++ b/typedapi/types/checkpointstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CheckpointStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/get_transform_stats/types.ts#L76-L83 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/get_transform_stats/types.ts#L76-L83 type CheckpointStats struct { Checkpoint int64 `json:"checkpoint"` CheckpointProgress *TransformProgress `json:"checkpoint_progress,omitempty"` @@ -57,7 +57,7 @@ func (s *CheckpointStats) UnmarshalJSON(data []byte) error { switch t { case "checkpoint": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/childrenaggregate.go b/typedapi/types/childrenaggregate.go index 1f7eeac8f8..c5be2029e0 100644 --- a/typedapi/types/childrenaggregate.go +++ b/typedapi/types/childrenaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // ChildrenAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L776-L777 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L783-L784 type ChildrenAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *ChildrenAggregate) UnmarshalJSON(data []byte) error { func (s ChildrenAggregate) MarshalJSON() ([]byte, error) { type opt ChildrenAggregate // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/childrenaggregation.go b/typedapi/types/childrenaggregation.go index 3100700a41..890471f7d5 100644 --- a/typedapi/types/childrenaggregation.go +++ b/typedapi/types/childrenaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,15 +26,12 @@ import ( "errors" "fmt" "io" - "strconv" ) // ChildrenAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L111-L116 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L113-L118 type ChildrenAggregation struct { - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // Type The child type that should be selected. Type *string `json:"type,omitempty"` } @@ -54,23 +51,6 @@ func (s *ChildrenAggregation) UnmarshalJSON(data []byte) error { switch t { - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "type": if err := dec.Decode(&s.Type); err != nil { return fmt.Errorf("%s | %w", "Type", err) diff --git a/typedapi/types/chisquareheuristic.go b/typedapi/types/chisquareheuristic.go index 3702451ca2..730e4efc12 100644 --- a/typedapi/types/chisquareheuristic.go +++ b/typedapi/types/chisquareheuristic.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ChiSquareHeuristic type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L735-L744 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L737-L746 type ChiSquareHeuristic struct { // BackgroundIsSuperset Set to `false` if you defined a custom background filter that represents a // different set of documents that you want to compare to. @@ -57,7 +57,7 @@ func (s *ChiSquareHeuristic) UnmarshalJSON(data []byte) error { switch t { case "background_is_superset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *ChiSquareHeuristic) UnmarshalJSON(data []byte) error { } case "include_negatives": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/chunkingconfig.go b/typedapi/types/chunkingconfig.go index 43c8e75c8a..0eb30b9581 100644 --- a/typedapi/types/chunkingconfig.go +++ b/typedapi/types/chunkingconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // ChunkingConfig type. 
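// Illustrative sketch, not part of the generated diff: with `meta` and
// `name` dropped from the per-aggregation structs (ChildrenAggregation
// above, CategorizeTextAggregation earlier), the struct now serializes only
// the child `type`. A minimal sketch of the resulting JSON, assuming the
// default marshalling shown in the struct tags:
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	childType := "answer"
	agg := types.ChildrenAggregation{Type: &childType}
	out, err := json.Marshal(agg)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out)) // expected: {"type":"answer"}
}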
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Datafeed.ts#L239-L252 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Datafeed.ts#L238-L251 type ChunkingConfig struct { // Mode If the mode is `auto`, the chunk size is dynamically calculated; // this is the recommended value when the datafeed does not use aggregations. diff --git a/typedapi/types/circleprocessor.go b/typedapi/types/circleprocessor.go index 2c4d3f45e9..19b612a57b 100644 --- a/typedapi/types/circleprocessor.go +++ b/typedapi/types/circleprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // CircleProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L410-L433 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L410-L433 type CircleProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -93,7 +93,7 @@ func (s *CircleProcessor) UnmarshalJSON(data []byte) error { s.Description = &o case "error_distance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -126,7 +126,7 @@ func (s *CircleProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -140,7 +140,7 @@ func (s *CircleProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/classificationinferenceoptions.go b/typedapi/types/classificationinferenceoptions.go index 00c4c5201c..61a51dbab9 100644 --- a/typedapi/types/classificationinferenceoptions.go +++ b/typedapi/types/classificationinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClassificationInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L93-L108 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L93-L108 type ClassificationInferenceOptions struct { // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. 
NumTopClasses *int `json:"num_top_classes,omitempty"` @@ -66,7 +66,7 @@ func (s *ClassificationInferenceOptions) UnmarshalJSON(data []byte) error { case "num_top_classes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *ClassificationInferenceOptions) UnmarshalJSON(data []byte) error { case "num_top_feature_importance_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/cleanuprepositoryresults.go b/typedapi/types/cleanuprepositoryresults.go index c40fe132e1..0f6bbcabef 100644 --- a/typedapi/types/cleanuprepositoryresults.go +++ b/typedapi/types/cleanuprepositoryresults.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CleanupRepositoryResults type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/cleanup_repository/SnapshotCleanupRepositoryResponse.ts#L29-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/cleanup_repository/SnapshotCleanupRepositoryResponse.ts#L29-L34 type CleanupRepositoryResults struct { // DeletedBlobs Number of binary large objects (blobs) removed during cleanup. DeletedBlobs int64 `json:"deleted_blobs"` @@ -55,7 +55,7 @@ func (s *CleanupRepositoryResults) UnmarshalJSON(data []byte) error { switch t { case "deleted_blobs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *CleanupRepositoryResults) UnmarshalJSON(data []byte) error { } case "deleted_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/client.go b/typedapi/types/client.go index 728c5dda45..24f4455c98 100644 --- a/typedapi/types/client.go +++ b/typedapi/types/client.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Client type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L649-L696 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L649-L696 type Client struct { // Agent Reported agent for the HTTP client. // If unavailable, this property is not included in the response. 
@@ -87,7 +87,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { s.Agent = &o case "closed_time_millis": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -102,7 +102,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { } case "id": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +117,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { } case "last_request_time_millis": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -156,7 +156,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { s.LocalAddress = &o case "opened_time_millis": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -183,7 +183,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { s.RemoteAddress = &o case "request_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -198,7 +198,7 @@ func (s *Client) UnmarshalJSON(data []byte) error { } case "request_size_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/closeindexresult.go b/typedapi/types/closeindexresult.go index 7a87b86ffe..cd9408cc3a 100644 --- a/typedapi/types/closeindexresult.go +++ b/typedapi/types/closeindexresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CloseIndexResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/close/CloseIndexResponse.ts#L32-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/close/CloseIndexResponse.ts#L32-L35 type CloseIndexResult struct { Closed bool `json:"closed"` Shards map[string]CloseShardResult `json:"shards,omitempty"` @@ -53,7 +53,7 @@ func (s *CloseIndexResult) UnmarshalJSON(data []byte) error { switch t { case "closed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/closeshardresult.go b/typedapi/types/closeshardresult.go index e7d323649a..b3101293a2 100644 --- a/typedapi/types/closeshardresult.go +++ b/typedapi/types/closeshardresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // CloseShardResult type. 
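// Illustrative sketch of the decoding pattern repeated throughout these
// files: numeric fields are first decoded into `any` (formerly
// `interface{}`) and then accepted either as a JSON string or as a JSON
// number, mirroring the generated switch statements above. Standalone
// helper shown for illustration only.
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

func toInt64(raw []byte) (int64, error) {
	var tmp any
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return 0, err
	}
	switch v := tmp.(type) {
	case string:
		return strconv.ParseInt(v, 10, 64)
	case float64:
		return int64(v), nil
	default:
		return 0, fmt.Errorf("unexpected JSON type %T", tmp)
	}
}

func main() {
	a, _ := toInt64([]byte(`"42"`)) // string form
	b, _ := toInt64([]byte(`42`))   // number form
	fmt.Println(a, b)               // 42 42
}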
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/close/CloseIndexResponse.ts#L37-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/close/CloseIndexResponse.ts#L37-L39 type CloseShardResult struct { Failures []ShardFailure `json:"failures"` } diff --git a/typedapi/types/clusterappliedstats.go b/typedapi/types/clusterappliedstats.go index 7ee915cdbf..646b982a0d 100644 --- a/typedapi/types/clusterappliedstats.go +++ b/typedapi/types/clusterappliedstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ClusterAppliedStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L221-L223 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L221-L223 type ClusterAppliedStats struct { Recordings []Recording `json:"recordings,omitempty"` } diff --git a/typedapi/types/clustercomponenttemplate.go b/typedapi/types/clustercomponenttemplate.go index 65156198fd..fd8ab36e9e 100644 --- a/typedapi/types/clustercomponenttemplate.go +++ b/typedapi/types/clustercomponenttemplate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ClusterComponentTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/_types/ComponentTemplate.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/_types/ComponentTemplate.ts#L30-L33 type ClusterComponentTemplate struct { ComponentTemplate ComponentTemplateNode `json:"component_template"` Name string `json:"name"` diff --git a/typedapi/types/clusterdetails.go b/typedapi/types/clusterdetails.go index 4cfd9fef67..8459a120a1 100644 --- a/typedapi/types/clusterdetails.go +++ b/typedapi/types/clusterdetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // ClusterDetails type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L45-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L45-L52 type ClusterDetails struct { Failures []ShardFailure `json:"failures,omitempty"` Indices string `json:"indices"` @@ -86,7 +86,7 @@ func (s *ClusterDetails) UnmarshalJSON(data []byte) error { } case "timed_out": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterfilesystem.go b/typedapi/types/clusterfilesystem.go index bba39ef9fe..8ca4db8f59 100644 --- a/typedapi/types/clusterfilesystem.go +++ b/typedapi/types/clusterfilesystem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterFileSystem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L34-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L34-L49 type ClusterFileSystem struct { // AvailableInBytes Total number of bytes available to JVM in file stores across all selected // nodes. @@ -62,7 +62,7 @@ func (s *ClusterFileSystem) UnmarshalJSON(data []byte) error { switch t { case "available_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -77,7 +77,7 @@ func (s *ClusterFileSystem) UnmarshalJSON(data []byte) error { } case "free_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *ClusterFileSystem) UnmarshalJSON(data []byte) error { } case "total_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterindexingpressure.go b/typedapi/types/clusterindexingpressure.go index 4705e38495..d37ca49ca6 100644 --- a/typedapi/types/clusterindexingpressure.go +++ b/typedapi/types/clusterindexingpressure.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ClusterIndexingPressure type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L570-L572 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L570-L572 type ClusterIndexingPressure struct { Memory ClusterPressureMemory `json:"memory"` } diff --git a/typedapi/types/clusterindices.go b/typedapi/types/clusterindices.go index 6890655f78..44893fbefa 100644 --- a/typedapi/types/clusterindices.go +++ b/typedapi/types/clusterindices.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterIndices type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L74-L107 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L74-L107 type ClusterIndices struct { // Analysis Contains statistics about analyzers and analyzer components used in selected // nodes. @@ -85,7 +85,7 @@ func (s *ClusterIndices) UnmarshalJSON(data []byte) error { } case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterindicesshards.go b/typedapi/types/clusterindicesshards.go index 06dbbb772b..9c20cdefb1 100644 --- a/typedapi/types/clusterindicesshards.go +++ b/typedapi/types/clusterindicesshards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterIndicesShards type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L60-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L60-L72 type ClusterIndicesShards struct { // Index Contains statistics about shards assigned to selected nodes. Index *ClusterIndicesShardsIndex `json:"index,omitempty"` @@ -64,7 +64,7 @@ func (s *ClusterIndicesShards) UnmarshalJSON(data []byte) error { } case "primaries": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *ClusterIndicesShards) UnmarshalJSON(data []byte) error { } case "replication": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *ClusterIndicesShards) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterindicesshardsindex.go b/typedapi/types/clusterindicesshardsindex.go index 296707339e..ff65d457ef 100644 --- a/typedapi/types/clusterindicesshardsindex.go +++ b/typedapi/types/clusterindicesshardsindex.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ClusterIndicesShardsIndex type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L51-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L51-L58 type ClusterIndicesShardsIndex struct { // Primaries Contains statistics about the number of primary shards assigned to selected // nodes. diff --git a/typedapi/types/clusterinfo.go b/typedapi/types/clusterinfo.go index a33b4c2a5a..0123bdde91 100644 --- a/typedapi/types/clusterinfo.go +++ b/typedapi/types/clusterinfo.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ClusterInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/types.ts#L48-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/types.ts#L48-L54 type ClusterInfo struct { Nodes map[string]NodeDiskUsage `json:"nodes"` ReservedSizes []ReservedSize `json:"reserved_sizes"` diff --git a/typedapi/types/clusterinfotargets.go b/typedapi/types/clusterinfotargets.go index 4f172d89ab..1bedb46fef 100644 --- a/typedapi/types/clusterinfotargets.go +++ b/typedapi/types/clusterinfotargets.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ import ( // ClusterInfoTargets type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L386-L386 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L389-L389 type ClusterInfoTargets []clusterinfotarget.ClusterInfoTarget diff --git a/typedapi/types/clusteringest.go b/typedapi/types/clusteringest.go index 0161467538..927b63219f 100644 --- a/typedapi/types/clusteringest.go +++ b/typedapi/types/clusteringest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterIngest type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L270-L273 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L270-L273 type ClusterIngest struct { NumberOfPipelines int `json:"number_of_pipelines"` ProcessorStats map[string]ClusterProcessor `json:"processor_stats"` @@ -54,7 +54,7 @@ func (s *ClusterIngest) UnmarshalJSON(data []byte) error { case "number_of_pipelines": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterjvm.go b/typedapi/types/clusterjvm.go index 82462801b0..883c143a51 100644 --- a/typedapi/types/clusterjvm.go +++ b/typedapi/types/clusterjvm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterJvm type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L275-L292 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L275-L292 type ClusterJvm struct { // MaxUptimeInMillis Uptime duration, in milliseconds, since JVM last started. MaxUptimeInMillis int64 `json:"max_uptime_in_millis"` @@ -69,7 +69,7 @@ func (s *ClusterJvm) UnmarshalJSON(data []byte) error { } case "threads": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterjvmmemory.go b/typedapi/types/clusterjvmmemory.go index d17b26a8c2..cdd5909b92 100644 --- a/typedapi/types/clusterjvmmemory.go +++ b/typedapi/types/clusterjvmmemory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterJvmMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L294-L303 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L294-L303 type ClusterJvmMemory struct { // HeapMaxInBytes Maximum amount of memory, in bytes, available for use by the heap across all // selected nodes. @@ -56,7 +56,7 @@ func (s *ClusterJvmMemory) UnmarshalJSON(data []byte) error { switch t { case "heap_max_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *ClusterJvmMemory) UnmarshalJSON(data []byte) error { } case "heap_used_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterjvmversion.go b/typedapi/types/clusterjvmversion.go index 7028d1c8a9..d495539322 100644 --- a/typedapi/types/clusterjvmversion.go +++ b/typedapi/types/clusterjvmversion.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterJvmVersion type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L305-L335 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L305-L335 type ClusterJvmVersion struct { // BundledJdk Always `true`. All distributions come with a bundled Java Development Kit // (JDK). @@ -68,7 +68,7 @@ func (s *ClusterJvmVersion) UnmarshalJSON(data []byte) error { switch t { case "bundled_jdk": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *ClusterJvmVersion) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -98,7 +98,7 @@ func (s *ClusterJvmVersion) UnmarshalJSON(data []byte) error { } case "using_bundled_jdk": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusternetworktypes.go b/typedapi/types/clusternetworktypes.go index 1272f1a4e2..cb5e9fa176 100644 --- a/typedapi/types/clusternetworktypes.go +++ b/typedapi/types/clusternetworktypes.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ClusterNetworkTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L337-L346 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L337-L346 type ClusterNetworkTypes struct { // HttpTypes Contains statistics about the HTTP network types used by selected nodes. HttpTypes map[string]int `json:"http_types"` diff --git a/typedapi/types/clusternode.go b/typedapi/types/clusternode.go index a8545eb935..cdc93bc3cd 100644 --- a/typedapi/types/clusternode.go +++ b/typedapi/types/clusternode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ClusterNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/ClusterNode.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/ClusterNode.ts#L22-L24 type ClusterNode struct { Name string `json:"name"` } diff --git a/typedapi/types/clusternodecount.go b/typedapi/types/clusternodecount.go index 26fdc0f53d..0096c0ac33 100644 --- a/typedapi/types/clusternodecount.go +++ b/typedapi/types/clusternodecount.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterNodeCount type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L348-L367 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L348-L367 type ClusterNodeCount struct { CoordinatingOnly int `json:"coordinating_only"` Data int `json:"data"` @@ -66,7 +66,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "coordinating_only": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "data": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -98,7 +98,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "data_cold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "data_content": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -130,7 +130,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "data_frozen": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -146,7 +146,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "data_hot": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -162,7 +162,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "data_warm": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -178,7 +178,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "ingest": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -194,7 +194,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "master": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -210,7 +210,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "ml": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -226,7 +226,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "remote_cluster_client": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -242,7 +242,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { 
case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -258,7 +258,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "transform": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -274,7 +274,7 @@ func (s *ClusterNodeCount) UnmarshalJSON(data []byte) error { case "voting_only": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusternodes.go b/typedapi/types/clusternodes.go index 917b03d854..dad9ee67e0 100644 --- a/typedapi/types/clusternodes.go +++ b/typedapi/types/clusternodes.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ClusterNodes type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L369-L402 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L369-L402 type ClusterNodes struct { // Count Contains counts for nodes selected by the request’s node filters. Count ClusterNodeCount `json:"count"` diff --git a/typedapi/types/clusteroperatingsystem.go b/typedapi/types/clusteroperatingsystem.go index 5beca56e1f..28d8c3f1ff 100644 --- a/typedapi/types/clusteroperatingsystem.go +++ b/typedapi/types/clusteroperatingsystem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterOperatingSystem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L415-L442 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L415-L442 type ClusterOperatingSystem struct { // AllocatedProcessors Number of processors used to calculate thread pool size across all selected // nodes. @@ -69,7 +69,7 @@ func (s *ClusterOperatingSystem) UnmarshalJSON(data []byte) error { case "allocated_processors": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *ClusterOperatingSystem) UnmarshalJSON(data []byte) error { case "available_processors": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusteroperatingsystemarchitecture.go b/typedapi/types/clusteroperatingsystemarchitecture.go index f388a2b9e7..cf9a00d8ba 100644 --- a/typedapi/types/clusteroperatingsystemarchitecture.go +++ b/typedapi/types/clusteroperatingsystemarchitecture.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterOperatingSystemArchitecture type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L404-L413 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L404-L413 type ClusterOperatingSystemArchitecture struct { // Arch Name of an architecture used by one or more selected nodes. Arch string `json:"arch"` @@ -68,7 +68,7 @@ func (s *ClusterOperatingSystemArchitecture) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusteroperatingsystemname.go b/typedapi/types/clusteroperatingsystemname.go index bbc40e8642..24313f531c 100644 --- a/typedapi/types/clusteroperatingsystemname.go +++ b/typedapi/types/clusteroperatingsystemname.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterOperatingSystemName type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L444-L453 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L444-L453 type ClusterOperatingSystemName struct { // Count Number of selected nodes using the operating system. Count int `json:"count"` @@ -56,7 +56,7 @@ func (s *ClusterOperatingSystemName) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusteroperatingsystemprettyname.go b/typedapi/types/clusteroperatingsystemprettyname.go index 0aba3abdd3..2e682124ab 100644 --- a/typedapi/types/clusteroperatingsystemprettyname.go +++ b/typedapi/types/clusteroperatingsystemprettyname.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterOperatingSystemPrettyName type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L455-L464 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L455-L464 type ClusterOperatingSystemPrettyName struct { // Count Number of selected nodes using the operating system. 
Count int `json:"count"` @@ -57,7 +57,7 @@ func (s *ClusterOperatingSystemPrettyName) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterpressurememory.go b/typedapi/types/clusterpressurememory.go index 8f5041ec32..fa5187802c 100644 --- a/typedapi/types/clusterpressurememory.go +++ b/typedapi/types/clusterpressurememory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterPressureMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L574-L578 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L574-L578 type ClusterPressureMemory struct { Current IndexingPressureMemorySummary `json:"current"` LimitInBytes int64 `json:"limit_in_bytes"` @@ -59,7 +59,7 @@ func (s *ClusterPressureMemory) UnmarshalJSON(data []byte) error { } case "limit_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterprocess.go b/typedapi/types/clusterprocess.go index 63cf53125d..f7201288f2 100644 --- a/typedapi/types/clusterprocess.go +++ b/typedapi/types/clusterprocess.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ClusterProcess type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L466-L475 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L466-L475 type ClusterProcess struct { // Cpu Contains statistics about CPU used by selected nodes. Cpu ClusterProcessCpu `json:"cpu"` diff --git a/typedapi/types/clusterprocesscpu.go b/typedapi/types/clusterprocesscpu.go index d50d2869b2..9beeedef94 100644 --- a/typedapi/types/clusterprocesscpu.go +++ b/typedapi/types/clusterprocesscpu.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterProcessCpu type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L477-L483 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L477-L483 type ClusterProcessCpu struct { // Percent Percentage of CPU used across all selected nodes. // Returns `-1` if not supported. 
@@ -55,7 +55,7 @@ func (s *ClusterProcessCpu) UnmarshalJSON(data []byte) error { case "percent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterprocessopenfiledescriptors.go b/typedapi/types/clusterprocessopenfiledescriptors.go index 27ff214a1b..37f789eb02 100644 --- a/typedapi/types/clusterprocessopenfiledescriptors.go +++ b/typedapi/types/clusterprocessopenfiledescriptors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterProcessOpenFileDescriptors type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L485-L501 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L485-L501 type ClusterProcessOpenFileDescriptors struct { // Avg Average number of concurrently open file descriptors. // Returns `-1` if not supported. @@ -62,7 +62,7 @@ func (s *ClusterProcessOpenFileDescriptors) UnmarshalJSON(data []byte) error { switch t { case "avg": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -77,7 +77,7 @@ func (s *ClusterProcessOpenFileDescriptors) UnmarshalJSON(data []byte) error { } case "max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *ClusterProcessOpenFileDescriptors) UnmarshalJSON(data []byte) error { } case "min": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterprocessor.go b/typedapi/types/clusterprocessor.go index 13e69bc96f..a3f9c9533f 100644 --- a/typedapi/types/clusterprocessor.go +++ b/typedapi/types/clusterprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L503-L509 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L503-L509 type ClusterProcessor struct { Count int64 `json:"count"` Current int64 `json:"current"` @@ -56,7 +56,7 @@ func (s *ClusterProcessor) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *ClusterProcessor) UnmarshalJSON(data []byte) error { } case "current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -86,7 +86,7 @@ func (s *ClusterProcessor) UnmarshalJSON(data []byte) error { } case "failed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterremoteinfo.go b/typedapi/types/clusterremoteinfo.go index b629b165c8..05ba009b29 100644 --- a/typedapi/types/clusterremoteinfo.go +++ b/typedapi/types/clusterremoteinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // ClusterRemoteSniffInfo // ClusterRemoteProxyInfo // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L29-L30 -type ClusterRemoteInfo interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L29-L30 +type ClusterRemoteInfo any diff --git a/typedapi/types/clusterremoteproxyinfo.go b/typedapi/types/clusterremoteproxyinfo.go index c7c9992b22..dfd5652005 100644 --- a/typedapi/types/clusterremoteproxyinfo.go +++ b/typedapi/types/clusterremoteproxyinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterRemoteProxyInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L42-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L42-L51 type ClusterRemoteProxyInfo struct { Connected bool `json:"connected"` InitialConnectTimeout Duration `json:"initial_connect_timeout"` @@ -59,7 +59,7 @@ func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { switch t { case "connected": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -79,7 +79,7 @@ func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { case "max_proxy_socket_connections": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { case "num_proxy_sockets_connected": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -139,7 +139,7 @@ func (s *ClusterRemoteProxyInfo) UnmarshalJSON(data []byte) error { s.ServerName = o case "skip_unavailable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterremotesniffinfo.go b/typedapi/types/clusterremotesniffinfo.go index c9bad5587c..d7613fa958 100644 --- a/typedapi/types/clusterremotesniffinfo.go +++ b/typedapi/types/clusterremotesniffinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterRemoteSniffInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L32-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/remote_info/ClusterRemoteInfoResponse.ts#L32-L40 type ClusterRemoteSniffInfo struct { Connected bool `json:"connected"` InitialConnectTimeout Duration `json:"initial_connect_timeout"` @@ -58,7 +58,7 @@ func (s *ClusterRemoteSniffInfo) UnmarshalJSON(data []byte) error { switch t { case "connected": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -78,7 +78,7 @@ func (s *ClusterRemoteSniffInfo) UnmarshalJSON(data []byte) error { case "max_connections_per_cluster": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -98,7 +98,7 @@ func (s *ClusterRemoteSniffInfo) UnmarshalJSON(data []byte) error { } case "num_nodes_connected": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +118,7 @@ func (s *ClusterRemoteSniffInfo) UnmarshalJSON(data []byte) error { } case "skip_unavailable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterruntimefieldtypes.go b/typedapi/types/clusterruntimefieldtypes.go index 18144bf107..2524e2a488 100644 --- a/typedapi/types/clusterruntimefieldtypes.go +++ b/typedapi/types/clusterruntimefieldtypes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterRuntimeFieldTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L169-L226 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L169-L226 type ClusterRuntimeFieldTypes struct { // CharsMax Maximum number of characters for a single runtime field script. 
CharsMax int `json:"chars_max"` @@ -85,7 +85,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "chars_max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -101,7 +101,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "chars_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +117,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -133,7 +133,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "doc_max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -149,7 +149,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "doc_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -165,7 +165,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "index_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -186,7 +186,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "lines_max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -202,7 +202,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "lines_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -223,7 +223,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "scriptless_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -239,7 +239,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "shadowed_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -255,7 +255,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "source_max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -271,7 +271,7 @@ func (s *ClusterRuntimeFieldTypes) UnmarshalJSON(data []byte) error { case "source_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clustershardmetrics.go b/typedapi/types/clustershardmetrics.go index 85b93d40c2..a38c4d2b74 100644 --- a/typedapi/types/clustershardmetrics.go +++ b/typedapi/types/clustershardmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterShardMetrics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L511-L524 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L511-L524 type ClusterShardMetrics struct { // Avg Mean number of shards in an index, counting only shards assigned to selected // nodes. 
@@ -60,7 +60,7 @@ func (s *ClusterShardMetrics) UnmarshalJSON(data []byte) error { switch t { case "avg": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *ClusterShardMetrics) UnmarshalJSON(data []byte) error { } case "max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *ClusterShardMetrics) UnmarshalJSON(data []byte) error { } case "min": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterstatequeue.go b/typedapi/types/clusterstatequeue.go index f8cd383aef..f5a7727f5c 100644 --- a/typedapi/types/clusterstatequeue.go +++ b/typedapi/types/clusterstatequeue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterStateQueue type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L248-L261 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L248-L261 type ClusterStateQueue struct { // Committed Number of committed cluster states in queue. Committed *int64 `json:"committed,omitempty"` @@ -57,7 +57,7 @@ func (s *ClusterStateQueue) UnmarshalJSON(data []byte) error { switch t { case "committed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *ClusterStateQueue) UnmarshalJSON(data []byte) error { } case "pending": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *ClusterStateQueue) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterstateupdate.go b/typedapi/types/clusterstateupdate.go index 3652d6e01f..31c6e501a4 100644 --- a/typedapi/types/clusterstateupdate.go +++ b/typedapi/types/clusterstateupdate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterStateUpdate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L278-L343 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L278-L343 type ClusterStateUpdate struct { // CommitTime The cumulative amount of time spent waiting for a successful cluster state // update to commit, which measures the time from the start of each publication @@ -161,7 +161,7 @@ func (s *ClusterStateUpdate) UnmarshalJSON(data []byte) error { } case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/clusterstatistics.go b/typedapi/types/clusterstatistics.go index 9ee69a7d44..15cc3dff2e 100644 --- a/typedapi/types/clusterstatistics.go +++ b/typedapi/types/clusterstatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ClusterStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L27-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L27-L35 type ClusterStatistics struct { Details map[string]ClusterDetails `json:"details,omitempty"` Failed int `json:"failed"` @@ -67,7 +67,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { case "failed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { case "partial": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,7 +99,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { case "running": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { case "skipped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -131,7 +131,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { case "successful": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -147,7 +147,7 @@ func (s *ClusterStatistics) UnmarshalJSON(data []byte) error { case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/collector.go b/typedapi/types/collector.go index dda3393a1b..fa8737084b 100644 --- a/typedapi/types/collector.go +++ b/typedapi/types/collector.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Collector type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L86-L91 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L86-L91 type Collector struct { Children []Collector `json:"children,omitempty"` Name string `json:"name"` diff --git a/typedapi/types/column.go b/typedapi/types/column.go index b5984fc294..69454d8cb0 100644 --- a/typedapi/types/column.go +++ b/typedapi/types/column.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Column type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/sql/types.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/sql/types.ts#L23-L26 type Column struct { Name string `json:"name"` Type string `json:"type"` diff --git a/typedapi/types/combinedfieldsquery.go b/typedapi/types/combinedfieldsquery.go index 64034b390c..aaf3e9cf7f 100644 --- a/typedapi/types/combinedfieldsquery.go +++ b/typedapi/types/combinedfieldsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // CombinedFieldsQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/abstractions.ts#L445-L479 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/abstractions.ts#L465-L499 type CombinedFieldsQuery struct { // AutoGenerateSynonymsPhraseQuery If true, match phrase queries are automatically created for multi-term // synonyms. @@ -78,7 +78,7 @@ func (s *CombinedFieldsQuery) UnmarshalJSON(data []byte) error { switch t { case "auto_generate_synonyms_phrase_query": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *CombinedFieldsQuery) UnmarshalJSON(data []byte) error { } case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/command.go b/typedapi/types/command.go index 64adbe52cd..f1c35e5b94 100644 --- a/typedapi/types/command.go +++ b/typedapi/types/command.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Command type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/reroute/types.ts#L22-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/reroute/types.ts#L22-L43 type Command struct { // AllocateEmptyPrimary Allocate an empty primary shard to a node. Accepts the index and shard for // index name and shard number, and node to allocate the shard to. Using this diff --git a/typedapi/types/commandallocateprimaryaction.go b/typedapi/types/commandallocateprimaryaction.go index 0b0b478fb3..d53e4c3e5a 100644 --- a/typedapi/types/commandallocateprimaryaction.go +++ b/typedapi/types/commandallocateprimaryaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CommandAllocatePrimaryAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/reroute/types.ts#L78-L84 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/reroute/types.ts#L78-L84 type CommandAllocatePrimaryAction struct { // AcceptDataLoss If a node which has a copy of the data rejoins the cluster later on, that // data will be deleted. To ensure that these implications are well-understood, @@ -58,7 +58,7 @@ func (s *CommandAllocatePrimaryAction) UnmarshalJSON(data []byte) error { switch t { case "accept_data_loss": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *CommandAllocatePrimaryAction) UnmarshalJSON(data []byte) error { case "shard": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/commandallocatereplicaaction.go b/typedapi/types/commandallocatereplicaaction.go index f908fbe892..0b7a5bd4f7 100644 --- a/typedapi/types/commandallocatereplicaaction.go +++ b/typedapi/types/commandallocatereplicaaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CommandAllocateReplicaAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/reroute/types.ts#L69-L76 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/reroute/types.ts#L69-L76 type CommandAllocateReplicaAction struct { Index string `json:"index"` Node string `json:"node"` @@ -72,7 +72,7 @@ func (s *CommandAllocateReplicaAction) UnmarshalJSON(data []byte) error { case "shard": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/commandcancelaction.go b/typedapi/types/commandcancelaction.go index 4b5f9816f8..58e23a5852 100644 --- a/typedapi/types/commandcancelaction.go +++ b/typedapi/types/commandcancelaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CommandCancelAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/reroute/types.ts#L45-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/reroute/types.ts#L45-L50 type CommandCancelAction struct { AllowPrimary *bool `json:"allow_primary,omitempty"` Index string `json:"index"` @@ -55,7 +55,7 @@ func (s *CommandCancelAction) UnmarshalJSON(data []byte) error { switch t { case "allow_primary": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *CommandCancelAction) UnmarshalJSON(data []byte) error { case "shard": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/commandmoveaction.go b/typedapi/types/commandmoveaction.go index 0e4981cf17..ad8e0a8991 100644 --- a/typedapi/types/commandmoveaction.go +++ b/typedapi/types/commandmoveaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CommandMoveAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/reroute/types.ts#L60-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/reroute/types.ts#L60-L67 type CommandMoveAction struct { // FromNode The node to move the shard from FromNode string `json:"from_node"` @@ -75,7 +75,7 @@ func (s *CommandMoveAction) UnmarshalJSON(data []byte) error { case "shard": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/commoncatqueryparameters.go b/typedapi/types/commoncatqueryparameters.go new file mode 100644 index 0000000000..688891b9f6 --- /dev/null +++ b/typedapi/types/commoncatqueryparameters.go @@ -0,0 +1,175 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. 
See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// CommonCatQueryParameters type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_spec_utils/behaviors.ts#L86-L132 +type CommonCatQueryParameters struct { + // Format Specifies the format to return the columnar data in, can be set to + // `text`, `json`, `cbor`, `yaml`, or `smile`. + Format *string `json:"format,omitempty"` + // H List of columns to appear in the response. Supports simple wildcards. + H []string `json:"h,omitempty"` + // Help When set to `true` will output available columns. This option + // can't be combined with any other query string option. + Help *bool `json:"help,omitempty"` + // Local If `true`, the request computes the list of selected nodes from the + // local cluster state. If `false` the list of selected nodes are computed + // from the cluster state of the master node. In both cases the coordinating + // node will send requests for further information to each selected node. + Local *bool `json:"local,omitempty"` + // MasterTimeout Period to wait for a connection to the master node. + MasterTimeout Duration `json:"master_timeout,omitempty"` + // S List of columns that determine how the table should be sorted. + // Sorting defaults to ascending and can be changed by setting `:asc` + // or `:desc` as a suffix to the column name. + S []string `json:"s,omitempty"` + // V When set to `true` will enable verbose output. 
+ V *bool `json:"v,omitempty"` +} + +func (s *CommonCatQueryParameters) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "format": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Format", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Format = &o + + case "h": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "H", err) + } + + s.H = append(s.H, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.H); err != nil { + return fmt.Errorf("%s | %w", "H", err) + } + } + + case "help": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Help", err) + } + s.Help = &value + case bool: + s.Help = &v + } + + case "local": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Local", err) + } + s.Local = &value + case bool: + s.Local = &v + } + + case "master_timeout": + if err := dec.Decode(&s.MasterTimeout); err != nil { + return fmt.Errorf("%s | %w", "MasterTimeout", err) + } + + case "s": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "S", err) + } + + s.S = append(s.S, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.S); err != nil { + return fmt.Errorf("%s | %w", "S", err) + } + } + + case "v": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "V", err) + } + s.V = &value + case bool: + s.V = &v + } + + } + } + return nil +} + +// NewCommonCatQueryParameters returns a CommonCatQueryParameters. +func NewCommonCatQueryParameters() *CommonCatQueryParameters { + r := &CommonCatQueryParameters{} + + return r +} diff --git a/typedapi/types/commongramstokenfilter.go b/typedapi/types/commongramstokenfilter.go index 587292a198..2eedec77e5 100644 --- a/typedapi/types/commongramstokenfilter.go +++ b/typedapi/types/commongramstokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CommonGramsTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L173-L179 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L175-L181 type CommonGramsTokenFilter struct { CommonWords []string `json:"common_words,omitempty"` CommonWordsPath *string `json:"common_words_path,omitempty"` @@ -74,7 +74,7 @@ func (s *CommonGramsTokenFilter) UnmarshalJSON(data []byte) error { s.CommonWordsPath = &o case "ignore_case": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *CommonGramsTokenFilter) UnmarshalJSON(data []byte) error { } case "query_mode": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/commonqueryparameters.go b/typedapi/types/commonqueryparameters.go new file mode 100644 index 0000000000..7d1f36e6aa --- /dev/null +++ b/typedapi/types/commonqueryparameters.go @@ -0,0 +1,137 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// CommonQueryParameters type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_spec_utils/behaviors.ts#L50-L84 +type CommonQueryParameters struct { + // ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors + // when they occur. + ErrorTrace *bool `json:"error_trace,omitempty"` + // FilterPath Comma-separated list of filters in dot notation which reduce the response + // returned by Elasticsearch. + FilterPath []string `json:"filter_path,omitempty"` + // Human When set to `true` will return statistics in a format suitable for humans. + // For example `"exists_time": "1h"` for humans and + // `"eixsts_time_in_millis": 3600000` for computers. When disabled the human + // readable values will be omitted. This makes sense for responses being + // consumed + // only by machines. + Human *bool `json:"human,omitempty"` + // Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use + // this option for debugging only. 
+ Pretty *bool `json:"pretty,omitempty"` +} + +func (s *CommonQueryParameters) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "error_trace": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "ErrorTrace", err) + } + s.ErrorTrace = &value + case bool: + s.ErrorTrace = &v + } + + case "filter_path": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "FilterPath", err) + } + + s.FilterPath = append(s.FilterPath, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.FilterPath); err != nil { + return fmt.Errorf("%s | %w", "FilterPath", err) + } + } + + case "human": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Human", err) + } + s.Human = &value + case bool: + s.Human = &v + } + + case "pretty": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Pretty", err) + } + s.Pretty = &value + case bool: + s.Pretty = &v + } + + } + } + return nil +} + +// NewCommonQueryParameters returns a CommonQueryParameters. +func NewCommonQueryParameters() *CommonQueryParameters { + r := &CommonQueryParameters{} + + return r +} diff --git a/typedapi/types/commontermsquery.go b/typedapi/types/commontermsquery.go index c026ffa3ba..860c371c28 100644 --- a/typedapi/types/commontermsquery.go +++ b/typedapi/types/commontermsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // CommonTermsQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L34-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L34-L44 type CommonTermsQuery struct { Analyzer *string `json:"analyzer,omitempty"` // Boost Floating point number used to decrease or increase the relevance scores of @@ -87,7 +87,7 @@ func (s *CommonTermsQuery) UnmarshalJSON(data []byte) error { s.Analyzer = &o case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,7 +103,7 @@ func (s *CommonTermsQuery) UnmarshalJSON(data []byte) error { } case "cutoff_frequency": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/compactnodeinfo.go b/typedapi/types/compactnodeinfo.go index c1ed0b8892..4711c96040 100644 --- a/typedapi/types/compactnodeinfo.go +++ b/typedapi/types/compactnodeinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
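The new CommonQueryParameters type above is tolerant on the wire: its boolean members decode from either JSON booleans or strings, and `filter_path` decodes from either an array or a single comma-separated string. A short decoding sketch, assuming the module's usual import path (github.com/elastic/go-elasticsearch/v8); the payload values are illustrative.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// The same logical parameters in two encodings: booleans as bools or as
	// strings, filter_path as an array or as a comma-separated string.
	payloads := []string{
		`{"error_trace": true, "human": "false", "pretty": "true", "filter_path": ["took", "hits.total"]}`,
		`{"error_trace": "true", "human": false, "pretty": true, "filter_path": "took,hits.total"}`,
	}
	for _, payload := range payloads {
		p := types.NewCommonQueryParameters()
		if err := json.Unmarshal([]byte(payload), p); err != nil {
			panic(err)
		}
		fmt.Printf("error_trace=%t pretty=%t human=%t filter_path=%v\n",
			*p.ErrorTrace, *p.Pretty, *p.Human, p.FilterPath)
	}
}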
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // CompactNodeInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/verify_repository/SnapshotVerifyRepositoryResponse.ts#L27-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/verify_repository/SnapshotVerifyRepositoryResponse.ts#L27-L29 type CompactNodeInfo struct { Name string `json:"name"` } diff --git a/typedapi/types/completioncontext.go b/typedapi/types/completioncontext.go index 72a3ff9f12..7276799538 100644 --- a/typedapi/types/completioncontext.go +++ b/typedapi/types/completioncontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CompletionContext type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L232-L261 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L235-L264 type CompletionContext struct { // Boost The factor by which the score of the suggestion should be boosted. // The score is computed by multiplying the boost with the suggestion weight. @@ -74,7 +74,7 @@ func (s *CompletionContext) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -120,7 +120,7 @@ func (s *CompletionContext) UnmarshalJSON(data []byte) error { } case "prefix": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/completionproperty.go b/typedapi/types/completionproperty.go index 1989da430e..851e13ad48 100644 --- a/typedapi/types/completionproperty.go +++ b/typedapi/types/completionproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // CompletionProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/specialized.ts#L27-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/specialized.ts#L33-L41 type CompletionProperty struct { Analyzer *string `json:"analyzer,omitempty"` Contexts []SuggestContext `json:"contexts,omitempty"` @@ -103,7 +103,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -128,7 +128,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -149,7 +149,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -239,12 +239,6 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -263,6 +257,18 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -419,6 +425,12 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -430,7 +442,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -446,7 +458,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { case "max_input_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -469,7 +481,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { } case "preserve_position_increments": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -483,7 +495,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { } case "preserve_separators": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -503,7 +515,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) 
localDec.Decode(&kind) @@ -524,7 +536,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -614,12 +626,6 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -638,6 +644,18 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -794,6 +812,12 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -828,7 +852,7 @@ func (s *CompletionProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/completionresult.go b/typedapi/types/completionresult.go new file mode 100644 index 0000000000..7464c2f78a --- /dev/null +++ b/typedapi/types/completionresult.go @@ -0,0 +1,76 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// CompletionResult type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/_types/Results.ts#L60-L65 +type CompletionResult struct { + Result string `json:"result"` +} + +func (s *CompletionResult) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "result": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Result", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Result = o + + } + } + return nil +} + +// NewCompletionResult returns a CompletionResult. +func NewCompletionResult() *CompletionResult { + r := &CompletionResult{} + + return r +} diff --git a/typedapi/types/completionstats.go b/typedapi/types/completionstats.go index e548596256..83776ce12a 100644 --- a/typedapi/types/completionstats.go +++ b/typedapi/types/completionstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CompletionStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L80-L90 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L80-L90 type CompletionStats struct { Fields map[string]FieldSizeUsage `json:"fields,omitempty"` // Size Total amount of memory used for completion across all shards assigned to @@ -71,7 +71,7 @@ func (s *CompletionStats) UnmarshalJSON(data []byte) error { } case "size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/completionsuggest.go b/typedapi/types/completionsuggest.go index 6647d5e13e..557fba061e 100644 --- a/typedapi/types/completionsuggest.go +++ b/typedapi/types/completionsuggest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CompletionSuggest type. 
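The new typedapi/types/completionresult.go added above models a single completion result from the inference API; its UnmarshalJSON unquotes the `result` string. A small decoding sketch, again assuming the module's usual import path:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Illustrative response body for a completion task.
	body := `{"result": "Elasticsearch is a distributed search and analytics engine."}`

	res := types.NewCompletionResult()
	if err := json.Unmarshal([]byte(body), res); err != nil {
		panic(err)
	}
	fmt.Println(res.Result)
}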
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L48-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L48-L55 type CompletionSuggest struct { Length int `json:"length"` Offset int `json:"offset"` @@ -56,7 +56,7 @@ func (s *CompletionSuggest) UnmarshalJSON(data []byte) error { case "length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *CompletionSuggest) UnmarshalJSON(data []byte) error { case "offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/completionsuggester.go b/typedapi/types/completionsuggester.go index 79d3c19e35..ab7f278200 100644 --- a/typedapi/types/completionsuggester.go +++ b/typedapi/types/completionsuggester.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CompletionSuggester type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L160-L178 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L163-L181 type CompletionSuggester struct { // Analyzer The analyzer to analyze the suggest text with. // Defaults to the search analyzer of the suggest field. @@ -122,7 +122,7 @@ func (s *CompletionSuggester) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -137,7 +137,7 @@ func (s *CompletionSuggester) UnmarshalJSON(data []byte) error { } case "skip_duplicates": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/completionsuggestoption.go b/typedapi/types/completionsuggestoption.go index 0f56a95145..1799864b94 100644 --- a/typedapi/types/completionsuggestoption.go +++ b/typedapi/types/completionsuggestoption.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CompletionSuggestOption type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L73-L84 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L73-L84 type CompletionSuggestOption struct { CollateMatch *bool `json:"collate_match,omitempty"` Contexts map[string][]Context `json:"contexts,omitempty"` @@ -61,7 +61,7 @@ func (s *CompletionSuggestOption) UnmarshalJSON(data []byte) error { switch t { case "collate_match": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *CompletionSuggestOption) UnmarshalJSON(data []byte) error { } case "score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -129,7 +129,7 @@ func (s *CompletionSuggestOption) UnmarshalJSON(data []byte) error { } case "_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/componenttemplatenode.go b/typedapi/types/componenttemplatenode.go index 264ab64d3c..ad4767e573 100644 --- a/typedapi/types/componenttemplatenode.go +++ b/typedapi/types/componenttemplatenode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ComponentTemplateNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/_types/ComponentTemplate.ts#L35-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/_types/ComponentTemplate.ts#L35-L40 type ComponentTemplateNode struct { Meta_ Metadata `json:"_meta,omitempty"` Template ComponentTemplateSummary `json:"template"` diff --git a/typedapi/types/componenttemplatesummary.go b/typedapi/types/componenttemplatesummary.go index 1dbd6d8ce7..d919e32821 100644 --- a/typedapi/types/componenttemplatesummary.go +++ b/typedapi/types/componenttemplatesummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ComponentTemplateSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/_types/ComponentTemplate.ts#L42-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/_types/ComponentTemplate.ts#L42-L54 type ComponentTemplateSummary struct { Aliases map[string]AliasDefinition `json:"aliases,omitempty"` Lifecycle *DataStreamLifecycleWithRollover `json:"lifecycle,omitempty"` diff --git a/typedapi/types/compositeaggregate.go b/typedapi/types/compositeaggregate.go index a3df0360ba..f20887ab29 100644 --- a/typedapi/types/compositeaggregate.go +++ b/typedapi/types/compositeaggregate.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // CompositeAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L618-L623 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L622-L627 type CompositeAggregate struct { AfterKey CompositeAggregateKey `json:"after_key,omitempty"` Buckets BucketsCompositeBucket `json:"buckets"` diff --git a/typedapi/types/compositeaggregatekey.go b/typedapi/types/compositeaggregatekey.go index 601576e497..4e9951d204 100644 --- a/typedapi/types/compositeaggregatekey.go +++ b/typedapi/types/compositeaggregatekey.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // CompositeAggregateKey type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L118-L118 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L120-L120 type CompositeAggregateKey map[string]FieldValue diff --git a/typedapi/types/compositeaggregation.go b/typedapi/types/compositeaggregation.go index 935f49e5ba..286f9e3c0b 100644 --- a/typedapi/types/compositeaggregation.go +++ b/typedapi/types/compositeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,13 +31,11 @@ import ( // CompositeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L120-L136 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L122-L138 type CompositeAggregation struct { // After When paginating, use the `after_key` value returned in the previous response // to retrieve the next page. After CompositeAggregateKey `json:"after,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // Size The number of composite buckets that should be returned. Size *int `json:"size,omitempty"` // Sources The value sources used to build composite buckets. 
@@ -65,26 +63,9 @@ func (s *CompositeAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "After", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/compositeaggregationsource.go b/typedapi/types/compositeaggregationsource.go index 9074c583ba..88eccb12e7 100644 --- a/typedapi/types/compositeaggregationsource.go +++ b/typedapi/types/compositeaggregationsource.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // CompositeAggregationSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L138-L155 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L140-L157 type CompositeAggregationSource struct { // DateHistogram A date histogram aggregation. DateHistogram *CompositeDateHistogramAggregation `json:"date_histogram,omitempty"` diff --git a/typedapi/types/compositebucket.go b/typedapi/types/compositebucket.go index 922f656f02..5fc9db381d 100644 --- a/typedapi/types/compositebucket.go +++ b/typedapi/types/compositebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // CompositeBucket type. 
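With `meta` and `name` dropped from CompositeAggregation above, the aggregation struct now carries only `after`, `size`, and `sources`. A construction sketch under stated assumptions: the `Sources` element shape ([]map[string]CompositeAggregationSource) and the `Terms` member of CompositeAggregationSource are taken from the surrounding generated types rather than shown verbatim in this hunk.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	field := "product"
	size := 100

	// Only after, size and sources remain on the aggregation itself.
	comp := types.CompositeAggregation{
		Size: &size,
		Sources: []map[string]types.CompositeAggregationSource{
			{"by_product": {Terms: &types.CompositeTermsAggregation{Field: &field}}},
		},
	}

	body, err := json.Marshal(comp)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body)) // e.g. {"size":100,"sources":[{"by_product":{"terms":{"field":"product"}}}]}
}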
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L625-L627 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L629-L631 type CompositeBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *CompositeBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *CompositeBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *CompositeBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *CompositeBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *CompositeBucket) UnmarshalJSON(data []byte) error { func (s CompositeBucket) MarshalJSON() ([]byte, error) { type opt CompositeBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/compositedatehistogramaggregation.go b/typedapi/types/compositedatehistogramaggregation.go index e62fbd3256..dde3ffa976 100644 --- a/typedapi/types/compositedatehistogramaggregation.go +++ b/typedapi/types/compositedatehistogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // CompositeDateHistogramAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L174-L182 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L176-L184 type CompositeDateHistogramAggregation struct { // CalendarInterval Either `calendar_interval` or `fixed_interval` must be present CalendarInterval *string `json:"calendar_interval,omitempty"` @@ -97,7 +97,7 @@ func (s *CompositeDateHistogramAggregation) UnmarshalJSON(data []byte) error { s.Format = &o case "missing_bucket": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/compositegeotilegridaggregation.go b/typedapi/types/compositegeotilegridaggregation.go index 713f545d19..c58a34aa3c 100644 --- a/typedapi/types/compositegeotilegridaggregation.go +++ b/typedapi/types/compositegeotilegridaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // CompositeGeoTileGridAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L184-L187 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L186-L189 type CompositeGeoTileGridAggregation struct { Bounds GeoBounds `json:"bounds,omitempty"` // Field Either `field` or `script` must be present @@ -75,7 +75,7 @@ func (s *CompositeGeoTileGridAggregation) UnmarshalJSON(data []byte) error { } case "missing_bucket": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *CompositeGeoTileGridAggregation) UnmarshalJSON(data []byte) error { case "precision": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/compositehistogramaggregation.go b/typedapi/types/compositehistogramaggregation.go index ffcb10651d..ddbacc0185 100644 --- a/typedapi/types/compositehistogramaggregation.go +++ b/typedapi/types/compositehistogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // CompositeHistogramAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L170-L172 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L172-L174 type CompositeHistogramAggregation struct { // Field Either `field` or `script` must be present Field *string `json:"field,omitempty"` @@ -69,7 +69,7 @@ func (s *CompositeHistogramAggregation) UnmarshalJSON(data []byte) error { } case "interval": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *CompositeHistogramAggregation) UnmarshalJSON(data []byte) error { } case "missing_bucket": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/compositetermsaggregation.go b/typedapi/types/compositetermsaggregation.go index 9b800d2f79..51c7f9faf7 100644 --- a/typedapi/types/compositetermsaggregation.go +++ b/typedapi/types/compositetermsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // CompositeTermsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L168-L168 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L170-L170 type CompositeTermsAggregation struct { // Field Either `field` or `script` must be present Field *string `json:"field,omitempty"` @@ -68,7 +68,7 @@ func (s *CompositeTermsAggregation) UnmarshalJSON(data []byte) error { } case "missing_bucket": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/conditiontokenfilter.go b/typedapi/types/conditiontokenfilter.go index 6878c230e7..5d3eaadc84 100644 --- a/typedapi/types/conditiontokenfilter.go +++ b/typedapi/types/conditiontokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ConditionTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L181-L185 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L183-L187 type ConditionTokenFilter struct { Filter []string `json:"filter"` Script Script `json:"script"` diff --git a/typedapi/types/configuration.go b/typedapi/types/configuration.go index 24e7e9c91a..30f469c7b4 100644 --- a/typedapi/types/configuration.go +++ b/typedapi/types/configuration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Configuration type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/_types/SnapshotLifecycle.ts#L99-L129 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/_types/SnapshotLifecycle.ts#L99-L129 type Configuration struct { // FeatureStates A list of feature states to be included in this snapshot. A list of features // available for inclusion in the snapshot and their descriptions be can be @@ -85,7 +85,7 @@ func (s *Configuration) UnmarshalJSON(data []byte) error { } case "ignore_unavailable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,7 +99,7 @@ func (s *Configuration) UnmarshalJSON(data []byte) error { } case "include_global_state": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -134,7 +134,7 @@ func (s *Configuration) UnmarshalJSON(data []byte) error { } case "partial": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/configurations.go b/typedapi/types/configurations.go index 34fd55a135..89a9e4715e 100644 --- a/typedapi/types/configurations.go +++ b/typedapi/types/configurations.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Configurations type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/_types/Phase.ts#L50-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/_types/Phase.ts#L50-L54 type Configurations struct { Forcemerge *ForceMergeConfiguration `json:"forcemerge,omitempty"` Rollover *RolloverConditions `json:"rollover,omitempty"` diff --git a/typedapi/types/confusionmatrixitem.go b/typedapi/types/confusionmatrixitem.go index f03337a1ad..09c4125587 100644 --- a/typedapi/types/confusionmatrixitem.go +++ b/typedapi/types/confusionmatrixitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ConfusionMatrixItem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L125-L130 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L125-L130 type ConfusionMatrixItem struct { ActualClass string `json:"actual_class"` ActualClassDocCount int `json:"actual_class_doc_count"` @@ -61,7 +61,7 @@ func (s *ConfusionMatrixItem) UnmarshalJSON(data []byte) error { case "actual_class_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -77,7 +77,7 @@ func (s *ConfusionMatrixItem) UnmarshalJSON(data []byte) error { case "other_predicted_class_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/confusionmatrixprediction.go b/typedapi/types/confusionmatrixprediction.go index 5d0a106482..65c7357a22 100644 --- a/typedapi/types/confusionmatrixprediction.go +++ b/typedapi/types/confusionmatrixprediction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ConfusionMatrixPrediction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L132-L135 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L132-L135 type ConfusionMatrixPrediction struct { Count int `json:"count"` PredictedClass string `json:"predicted_class"` @@ -54,7 +54,7 @@ func (s *ConfusionMatrixPrediction) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/confusionmatrixthreshold.go b/typedapi/types/confusionmatrixthreshold.go index 22cbce1722..5f67be453d 100644 --- a/typedapi/types/confusionmatrixthreshold.go +++ b/typedapi/types/confusionmatrixthreshold.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ConfusionMatrixThreshold type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L137-L158 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L137-L158 type ConfusionMatrixThreshold struct { // FalseNegative False Negative FalseNegative int `json:"fn"` @@ -60,7 +60,7 @@ func (s *ConfusionMatrixThreshold) UnmarshalJSON(data []byte) error { case "fn": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *ConfusionMatrixThreshold) UnmarshalJSON(data []byte) error { case "fp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *ConfusionMatrixThreshold) UnmarshalJSON(data []byte) error { case "tn": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +108,7 @@ func (s *ConfusionMatrixThreshold) UnmarshalJSON(data []byte) error { case "tp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/connection.go b/typedapi/types/connection.go index 37602c8225..22cddd4320 100644 --- a/typedapi/types/connection.go +++ b/typedapi/types/connection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Connection type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/graph/_types/Connection.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/graph/_types/Connection.ts#L22-L27 type Connection struct { DocCount int64 `json:"doc_count"` Source int64 `json:"source"` @@ -55,7 +55,7 @@ func (s *Connection) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *Connection) UnmarshalJSON(data []byte) error { } case "source": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *Connection) UnmarshalJSON(data []byte) error { } case "target": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *Connection) UnmarshalJSON(data []byte) error { } case "weight": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/constantkeywordproperty.go b/typedapi/types/constantkeywordproperty.go index 6f49c00ef9..00a0ec90b6 100644 --- a/typedapi/types/constantkeywordproperty.go +++ b/typedapi/types/constantkeywordproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // ConstantKeywordProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/specialized.ts#L44-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/specialized.ts#L50-L53 type ConstantKeywordProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -72,7 +72,7 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -93,7 +93,7 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -183,12 +183,6 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -207,6 +201,18 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -363,6 +369,12 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -374,7 +386,7 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -403,7 +415,7 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -424,7 +436,7 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -514,12 +526,6 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -538,6 +544,18 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { return err } 
s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -694,6 +712,12 @@ func (s *ConstantKeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { diff --git a/typedapi/types/constantscorequery.go b/typedapi/types/constantscorequery.go index ccdbaf9d48..557307fb5b 100644 --- a/typedapi/types/constantscorequery.go +++ b/typedapi/types/constantscorequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ConstantScoreQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L69-L76 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L70-L77 type ConstantScoreQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -63,7 +63,7 @@ func (s *ConstantScoreQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/context.go b/typedapi/types/context.go index c639e0d2a4..a26a0252ca 100644 --- a/typedapi/types/context.go +++ b/typedapi/types/context.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // GeoLocation // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L225-L230 -type Context interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L228-L233 +type Context any diff --git a/typedapi/types/contextmethod.go b/typedapi/types/contextmethod.go index 7caab099f2..380ae59b78 100644 --- a/typedapi/types/contextmethod.go +++ b/typedapi/types/contextmethod.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ContextMethod type. 
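The property-kind switches extended above (for example in constantkeywordproperty.go and completionproperty.go) mean that sub-fields mapped as `semantic_text` or `icu_collation_keyword` now decode into their concrete generated types instead of falling through to the generic Property case. A sketch of the effect; it assumes, as the generated constructors elsewhere in this package do, that NewConstantKeywordProperty initializes the Fields map.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A constant_keyword mapping whose sub-field uses the new semantic_text kind.
	raw := `{"fields": {"body_semantic": {"type": "semantic_text"}}}`

	// Assumption: the generated constructor allocates Fields, so the decoder
	// can assign into it.
	p := types.NewConstantKeywordProperty()
	if err := json.Unmarshal([]byte(raw), p); err != nil {
		panic(err)
	}

	// With the new case in the kind switch, the sub-field arrives as a
	// concrete *types.SemanticTextProperty rather than a generic *types.Property.
	_, ok := p.Fields["body_semantic"].(*types.SemanticTextProperty)
	fmt.Println("decoded as SemanticTextProperty:", ok)
}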
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/get_script_context/types.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/get_script_context/types.ts#L27-L31 type ContextMethod struct { Name string `json:"name"` Params []ContextMethodParam `json:"params"` diff --git a/typedapi/types/contextmethodparam.go b/typedapi/types/contextmethodparam.go index eddb0bfaf9..c50ee8f5e5 100644 --- a/typedapi/types/contextmethodparam.go +++ b/typedapi/types/contextmethodparam.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ContextMethodParam type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/get_script_context/types.ts#L33-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/get_script_context/types.ts#L33-L36 type ContextMethodParam struct { Name string `json:"name"` Type string `json:"type"` diff --git a/typedapi/types/convertprocessor.go b/typedapi/types/convertprocessor.go index ac802df8dd..ad614995dc 100644 --- a/typedapi/types/convertprocessor.go +++ b/typedapi/types/convertprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // ConvertProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L445-L465 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L445-L465 type ConvertProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -104,7 +104,7 @@ func (s *ConvertProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +118,7 @@ func (s *ConvertProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/coordinatorstats.go b/typedapi/types/coordinatorstats.go index 4c179aff54..70189ccd90 100644 --- a/typedapi/types/coordinatorstats.go +++ b/typedapi/types/coordinatorstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CoordinatorStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/stats/types.ts#L29-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/stats/types.ts#L29-L35 type CoordinatorStats struct { ExecutedSearchesTotal int64 `json:"executed_searches_total"` NodeId string `json:"node_id"` @@ -56,7 +56,7 @@ func (s *CoordinatorStats) UnmarshalJSON(data []byte) error { switch t { case "executed_searches_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -77,7 +77,7 @@ func (s *CoordinatorStats) UnmarshalJSON(data []byte) error { case "queue_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -93,7 +93,7 @@ func (s *CoordinatorStats) UnmarshalJSON(data []byte) error { case "remote_requests_current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +108,7 @@ func (s *CoordinatorStats) UnmarshalJSON(data []byte) error { } case "remote_requests_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/coordsgeobounds.go b/typedapi/types/coordsgeobounds.go index be1d47e5f6..da59a55415 100644 --- a/typedapi/types/coordsgeobounds.go +++ b/typedapi/types/coordsgeobounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CoordsGeoBounds type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L154-L159 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L154-L159 type CoordsGeoBounds struct { Bottom Float64 `json:"bottom"` Left Float64 `json:"left"` @@ -55,7 +55,7 @@ func (s *CoordsGeoBounds) UnmarshalJSON(data []byte) error { switch t { case "bottom": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *CoordsGeoBounds) UnmarshalJSON(data []byte) error { } case "left": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *CoordsGeoBounds) UnmarshalJSON(data []byte) error { } case "right": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,7 +103,7 @@ func (s *CoordsGeoBounds) UnmarshalJSON(data []byte) error { } case "top": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/coreknnquery.go b/typedapi/types/coreknnquery.go index 8eaf37c794..25e3ed2a35 100644 --- a/typedapi/types/coreknnquery.go +++ b/typedapi/types/coreknnquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // CoreKnnQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/knn_search/_types/Knn.ts#L24-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/knn_search/_types/Knn.ts#L24-L33 type CoreKnnQuery struct { // Field The name of the vector field to search against Field string `json:"field"` // K The final number of nearest neighbors to return as top hits - K int64 `json:"k"` + K int `json:"k"` // NumCandidates The number of nearest neighbor candidates to consider per shard - NumCandidates int64 `json:"num_candidates"` + NumCandidates int `json:"num_candidates"` // QueryVector The query vector QueryVector []float32 `json:"query_vector"` } @@ -64,32 +64,34 @@ func (s *CoreKnnQuery) UnmarshalJSON(data []byte) error { } case "k": - var tmp interface{} + + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.ParseInt(v, 10, 64) + value, err := strconv.Atoi(v) if err != nil { return fmt.Errorf("%s | %w", "K", err) } s.K = value case float64: - f := int64(v) + f := int(v) s.K = f } case "num_candidates": - var tmp interface{} + + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.ParseInt(v, 10, 64) + value, err := strconv.Atoi(v) if err != nil { return fmt.Errorf("%s | %w", "NumCandidates", err) } s.NumCandidates = value case float64: - f := int64(v) + f := int(v) s.NumCandidates = f } diff --git a/typedapi/types/counter.go b/typedapi/types/counter.go index f93ccbe2fd..447655107f 100644 --- a/typedapi/types/counter.go +++ b/typedapi/types/counter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Counter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L35-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L35-L38 type Counter struct { Active int64 `json:"active"` Total int64 `json:"total"` @@ -53,7 +53,7 @@ func (s *Counter) UnmarshalJSON(data []byte) error { switch t { case "active": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *Counter) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/countrecord.go b/typedapi/types/countrecord.go index 695a276414..0d1a696618 100644 --- a/typedapi/types/countrecord.go +++ b/typedapi/types/countrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CountRecord type. 
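The coreknnquery.go hunk above narrows K and NumCandidates from int64 to int and parses string values with strconv.Atoi. A small sketch of the resulting struct usage, assuming the v8 module path for the generated typedapi/types package; the field name in the example is hypothetical.

package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// K and NumCandidates are plain ints after this change.
	q := types.CoreKnnQuery{
		Field:         "title_embedding", // hypothetical vector field name
		K:             10,
		NumCandidates: 100,
		QueryVector:   []float32{0.12, 0.34, 0.56},
	}
	fmt.Printf("%+v\n", q)
}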
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/count/types.ts#L23-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/count/types.ts#L23-L39 type CountRecord struct { // Count the document count Count *string `json:"count,omitempty"` diff --git a/typedapi/types/cpu.go b/typedapi/types/cpu.go index 6bc99b69ad..30bdd2c627 100644 --- a/typedapi/types/cpu.go +++ b/typedapi/types/cpu.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Cpu type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L539-L548 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L539-L548 type Cpu struct { LoadAverage map[string]Float64 `json:"load_average,omitempty"` Percent *int `json:"percent,omitempty"` @@ -68,7 +68,7 @@ func (s *Cpu) UnmarshalJSON(data []byte) error { case "percent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/cpuacct.go b/typedapi/types/cpuacct.go index e6f3a0013d..6ff19bbb82 100644 --- a/typedapi/types/cpuacct.go +++ b/typedapi/types/cpuacct.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CpuAcct type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L476-L485 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L476-L485 type CpuAcct struct { // ControlGroup The `cpuacct` control group to which the Elasticsearch process belongs. ControlGroup *string `json:"control_group,omitempty"` diff --git a/typedapi/types/createdstatus.go b/typedapi/types/createdstatus.go index 691f4658de..2f3c66f705 100644 --- a/typedapi/types/createdstatus.go +++ b/typedapi/types/createdstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CreatedStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/CreatedStatus.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/CreatedStatus.ts#L20-L22 type CreatedStatus struct { Created bool `json:"created"` } @@ -52,7 +52,7 @@ func (s *CreatedStatus) UnmarshalJSON(data []byte) error { switch t { case "created": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/createoperation.go b/typedapi/types/createoperation.go index ef4f8adf20..53934f9f30 100644 --- a/typedapi/types/createoperation.go +++ b/typedapi/types/createoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // CreateOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/bulk/types.ts#L130-L130 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/bulk/types.ts#L130-L130 type CreateOperation struct { // DynamicTemplates A map from the full name of fields to the name of dynamic templates. // Defaults to an empty map. @@ -91,7 +91,7 @@ func (s *CreateOperation) UnmarshalJSON(data []byte) error { } case "if_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -128,7 +128,7 @@ func (s *CreateOperation) UnmarshalJSON(data []byte) error { s.Pipeline = &o case "require_alias": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/csvprocessor.go b/typedapi/types/csvprocessor.go index 6aefa8a3a5..812676b633 100644 --- a/typedapi/types/csvprocessor.go +++ b/typedapi/types/csvprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CsvProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L467-L500 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L467-L500 type CsvProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -115,7 +115,7 @@ func (s *CsvProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -129,7 +129,7 @@ func (s *CsvProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -200,7 +200,7 @@ func (s *CsvProcessor) UnmarshalJSON(data []byte) error { } case "trim": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/cumulativecardinalityaggregate.go b/typedapi/types/cumulativecardinalityaggregate.go index ca64fc806a..8034094617 100644 --- a/typedapi/types/cumulativecardinalityaggregate.go +++ b/typedapi/types/cumulativecardinalityaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CumulativeCardinalityAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L747-L755 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L754-L762 type CumulativeCardinalityAggregate struct { Meta Metadata `json:"meta,omitempty"` Value int64 `json:"value"` @@ -59,7 +59,7 @@ func (s *CumulativeCardinalityAggregate) UnmarshalJSON(data []byte) error { } case "value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/cumulativecardinalityaggregation.go b/typedapi/types/cumulativecardinalityaggregation.go index 17ff46fe9b..add66fe2c8 100644 --- a/typedapi/types/cumulativecardinalityaggregation.go +++ b/typedapi/types/cumulativecardinalityaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // CumulativeCardinalityAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L192-L192 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L192-L192 type CumulativeCardinalityAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type CumulativeCardinalityAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. 
GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` } func (s *CumulativeCardinalityAggregation) UnmarshalJSON(data []byte) error { @@ -84,23 +82,6 @@ func (s *CumulativeCardinalityAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/cumulativesumaggregation.go b/typedapi/types/cumulativesumaggregation.go index 2af527b3f3..f520591841 100644 --- a/typedapi/types/cumulativesumaggregation.go +++ b/typedapi/types/cumulativesumaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // CumulativeSumAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L194-L194 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L194-L194 type CumulativeSumAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type CumulativeSumAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` } func (s *CumulativeSumAggregation) UnmarshalJSON(data []byte) error { @@ -84,23 +82,6 @@ func (s *CumulativeSumAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/currentnode.go b/typedapi/types/currentnode.go index e10358a002..4d0ece9735 100644 --- a/typedapi/types/currentnode.go +++ b/typedapi/types/currentnode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CurrentNode type. 
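Both cumulative pipeline aggregation types above lose their Meta and Name fields together with the matching decode cases, leaving buckets_path, format and gap_policy. A hedged sketch of decoding such an aggregation body with the trimmed type, again assuming the generated typedapi/types package and that BucketsPath remains the usual union alias.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	body := []byte(`{"buckets_path": "sales_per_month>total_sales", "format": "#,##0.00"}`)

	var agg types.CumulativeSumAggregation
	if err := json.Unmarshal(body, &agg); err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	// Meta and Name no longer exist on this struct after the change above.
	fmt.Printf("%+v\n", agg)
}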
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/types.ts#L78-L84 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/types.ts#L78-L84 type CurrentNode struct { Attributes map[string]string `json:"attributes"` Id string `json:"id"` @@ -80,7 +80,7 @@ func (s *CurrentNode) UnmarshalJSON(data []byte) error { case "weight_ranking": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/customanalyzer.go b/typedapi/types/customanalyzer.go index cf1d71bbb8..d035816cdf 100644 --- a/typedapi/types/customanalyzer.go +++ b/typedapi/types/customanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CustomAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L28-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L28-L35 type CustomAnalyzer struct { CharFilter []string `json:"char_filter,omitempty"` Filter []string `json:"filter,omitempty"` @@ -68,7 +68,7 @@ func (s *CustomAnalyzer) UnmarshalJSON(data []byte) error { case "position_increment_gap": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,7 +84,7 @@ func (s *CustomAnalyzer) UnmarshalJSON(data []byte) error { case "position_offset_gap": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/customcategorizetextanalyzer.go b/typedapi/types/customcategorizetextanalyzer.go index e71267cf66..8d7b46c42a 100644 --- a/typedapi/types/customcategorizetextanalyzer.go +++ b/typedapi/types/customcategorizetextanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // CustomCategorizeTextAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L1108-L1112 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L1116-L1120 type CustomCategorizeTextAnalyzer struct { CharFilter []string `json:"char_filter,omitempty"` Filter []string `json:"filter,omitempty"` diff --git a/typedapi/types/customnormalizer.go b/typedapi/types/customnormalizer.go index a0cedb8be7..17c9f773e8 100644 --- a/typedapi/types/customnormalizer.go +++ b/typedapi/types/customnormalizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // CustomNormalizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/normalizers.ts#L30-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/normalizers.ts#L30-L34 type CustomNormalizer struct { CharFilter []string `json:"char_filter,omitempty"` Filter []string `json:"filter,omitempty"` diff --git a/typedapi/types/dailyschedule.go b/typedapi/types/dailyschedule.go index bd37a273b2..2de366e1f1 100644 --- a/typedapi/types/dailyschedule.go +++ b/typedapi/types/dailyschedule.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DailySchedule type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Schedule.ts#L33-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Schedule.ts#L33-L35 type DailySchedule struct { At []ScheduleTimeOfDay `json:"at"` } diff --git a/typedapi/types/danglingindex.go b/typedapi/types/danglingindex.go index 6614df9a85..9359087e1e 100644 --- a/typedapi/types/danglingindex.go +++ b/typedapi/types/danglingindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DanglingIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/dangling_indices/list_dangling_indices/ListDanglingIndicesResponse.ts#L29-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/dangling_indices/list_dangling_indices/ListDanglingIndicesResponse.ts#L29-L34 type DanglingIndex struct { CreationDateMillis int64 `json:"creation_date_millis"` IndexName string `json:"index_name"` diff --git a/typedapi/types/datacounts.go b/typedapi/types/datacounts.go index 3ef2db4e9f..a8dca88d49 100644 --- a/typedapi/types/datacounts.go +++ b/typedapi/types/datacounts.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataCounts type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Job.ts#L352-L372 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Job.ts#L352-L372 type DataCounts struct { BucketCount int64 `json:"bucket_count"` EarliestRecordTimestamp *int64 `json:"earliest_record_timestamp,omitempty"` @@ -70,7 +70,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { switch t { case "bucket_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "earliest_record_timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "empty_bucket_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "input_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -130,7 +130,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "input_field_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -145,7 +145,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "input_record_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -160,7 +160,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "invalid_date_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -180,7 +180,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "last_data_time": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -195,7 +195,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "latest_bucket_timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -210,7 +210,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "latest_empty_bucket_timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -225,7 +225,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "latest_record_timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -240,7 +240,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "latest_sparse_bucket_timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -255,7 +255,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "log_time": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -270,7 +270,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "missing_field_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -285,7 +285,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "out_of_order_timestamp_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -300,7 +300,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "processed_field_count": - var tmp interface{} + var tmp any 
dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -315,7 +315,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "processed_record_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -330,7 +330,7 @@ func (s *DataCounts) UnmarshalJSON(data []byte) error { } case "sparse_bucket_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datadescription.go b/typedapi/types/datadescription.go index 462a0635ac..002cf23fa9 100644 --- a/typedapi/types/datadescription.go +++ b/typedapi/types/datadescription.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataDescription type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Job.ts#L374-L390 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Job.ts#L374-L390 type DataDescription struct { FieldDelimiter *string `json:"field_delimiter,omitempty"` // Format Only JSON format is supported at this time. diff --git a/typedapi/types/dataemailattachment.go b/typedapi/types/dataemailattachment.go index 6f62caf6e2..bbef415442 100644 --- a/typedapi/types/dataemailattachment.go +++ b/typedapi/types/dataemailattachment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // DataEmailAttachment type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L234-L236 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L234-L236 type DataEmailAttachment struct { Format *dataattachmentformat.DataAttachmentFormat `json:"format,omitempty"` } diff --git a/typedapi/types/datafeedauthorization.go b/typedapi/types/datafeedauthorization.go index 9dded31270..c9b5a53e3f 100644 --- a/typedapi/types/datafeedauthorization.go +++ b/typedapi/types/datafeedauthorization.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DatafeedAuthorization type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Authorization.ts#L31-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Authorization.ts#L31-L43 type DatafeedAuthorization struct { // ApiKey If an API key was used for the most recent update to the datafeed, its name // and identifier are listed in the response. diff --git a/typedapi/types/datafeedconfig.go b/typedapi/types/datafeedconfig.go index 238b715505..a003b9c92f 100644 --- a/typedapi/types/datafeedconfig.go +++ b/typedapi/types/datafeedconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DatafeedConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Datafeed.ts#L60-L117 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Datafeed.ts#L59-L116 type DatafeedConfig struct { // Aggregations If set, the datafeed performs aggregation searches. Support for aggregations // is limited and should be used only with low cardinality data. @@ -145,8 +145,19 @@ func (s *DatafeedConfig) UnmarshalJSON(data []byte) error { } case "indices", "indexes": - if err := dec.Decode(&s.Indices); err != nil { - return fmt.Errorf("%s | %w", "Indices", err) + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Indices", err) + } + + s.Indices = append(s.Indices, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Indices); err != nil { + return fmt.Errorf("%s | %w", "Indices", err) + } } case "indices_options": @@ -161,7 +172,7 @@ func (s *DatafeedConfig) UnmarshalJSON(data []byte) error { case "max_empty_searches": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -200,7 +211,7 @@ func (s *DatafeedConfig) UnmarshalJSON(data []byte) error { case "scroll_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datafeedrunningstate.go b/typedapi/types/datafeedrunningstate.go index 50beb1bb2e..514d6dd01c 100644 --- a/typedapi/types/datafeedrunningstate.go +++ b/typedapi/types/datafeedrunningstate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DatafeedRunningState type. 
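The datafeedconfig.go hunk above makes the indices/indexes key accept either a single string or an array, appending a bare string to the slice. A brief sketch of both forms decoding into the same field, with the module path assumed as in the earlier examples.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	single := []byte(`{"indices": "logs-*"}`)
	many := []byte(`{"indices": ["logs-*", "metrics-*"]}`)

	var a, b types.DatafeedConfig
	_ = json.Unmarshal(single, &a) // bare string is wrapped into a one-element slice
	_ = json.Unmarshal(many, &b)   // arrays decode as before

	fmt.Println(a.Indices) // [logs-*]
	fmt.Println(b.Indices) // [logs-* metrics-*]
}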
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Datafeed.ts#L198-L212 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Datafeed.ts#L197-L211 type DatafeedRunningState struct { // RealTimeConfigured Indicates if the datafeed is "real-time"; meaning that the datafeed has no // configured `end` time. @@ -61,7 +61,7 @@ func (s *DatafeedRunningState) UnmarshalJSON(data []byte) error { switch t { case "real_time_configured": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *DatafeedRunningState) UnmarshalJSON(data []byte) error { } case "real_time_running": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datafeeds.go b/typedapi/types/datafeeds.go index 65e457a77e..32f1cf83cf 100644 --- a/typedapi/types/datafeeds.go +++ b/typedapi/types/datafeeds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Datafeeds type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/info/types.ts#L40-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/info/types.ts#L40-L42 type Datafeeds struct { ScrollSize int `json:"scroll_size"` } @@ -53,7 +53,7 @@ func (s *Datafeeds) UnmarshalJSON(data []byte) error { case "scroll_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datafeedsrecord.go b/typedapi/types/datafeedsrecord.go index 666d6cc924..8e956a3ef0 100644 --- a/typedapi/types/datafeedsrecord.go +++ b/typedapi/types/datafeedsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DatafeedsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/ml_datafeeds/types.ts#L22-L87 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/ml_datafeeds/types.ts#L22-L87 type DatafeedsRecord struct { // AssignmentExplanation For started datafeeds only, contains messages relating to the selection of a // node. diff --git a/typedapi/types/datafeedstats.go b/typedapi/types/datafeedstats.go index c0f414e0c8..b6f9bd8143 100644 --- a/typedapi/types/datafeedstats.go +++ b/typedapi/types/datafeedstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DatafeedStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Datafeed.ts#L140-L169 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Datafeed.ts#L139-L168 type DatafeedStats struct { // AssignmentExplanation For started datafeeds only, contains messages relating to the selection of a // node. diff --git a/typedapi/types/datafeedtimingstats.go b/typedapi/types/datafeedtimingstats.go index e96b055e83..2403a5a312 100644 --- a/typedapi/types/datafeedtimingstats.go +++ b/typedapi/types/datafeedtimingstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DatafeedTimingStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Datafeed.ts#L171-L196 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Datafeed.ts#L170-L195 type DatafeedTimingStats struct { // AverageSearchTimePerBucketMs The average search time per bucket, in milliseconds. AverageSearchTimePerBucketMs Float64 `json:"average_search_time_per_bucket_ms,omitempty"` @@ -68,7 +68,7 @@ func (s *DatafeedTimingStats) UnmarshalJSON(data []byte) error { } case "bucket_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -93,7 +93,7 @@ func (s *DatafeedTimingStats) UnmarshalJSON(data []byte) error { } case "search_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeanalysis.go b/typedapi/types/dataframeanalysis.go index 5773238fe6..7562ff4613 100644 --- a/typedapi/types/dataframeanalysis.go +++ b/typedapi/types/dataframeanalysis.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalysis type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L134-L213 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L134-L213 type DataframeAnalysis struct { // Alpha Advanced configuration option. 
Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss @@ -164,7 +164,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { switch t { case "alpha": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -192,7 +192,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { s.DependentVariable = o case "downsample_factor": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -208,7 +208,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { } case "early_stopping_enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -222,7 +222,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { } case "eta": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -238,7 +238,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { } case "eta_growth_rate_per_tree": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -254,7 +254,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { } case "feature_bag_fraction": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -275,7 +275,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { } case "gamma": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -291,7 +291,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { } case "lambda": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -308,7 +308,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case "max_optimization_rounds_per_hyperparameter": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -324,7 +324,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case "max_trees", "maximum_number_trees": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -340,7 +340,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case "num_top_feature_importance_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -360,7 +360,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { } case "randomize_seed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -377,7 +377,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { case "soft_tree_depth_limit": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -392,7 +392,7 @@ func (s *DataframeAnalysis) UnmarshalJSON(data []byte) error { } case "soft_tree_depth_tolerance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeanalysisanalyzedfields.go b/typedapi/types/dataframeanalysisanalyzedfields.go index d1239e408f..7c5ade8edc 100644 --- a/typedapi/types/dataframeanalysisanalyzedfields.go +++ b/typedapi/types/dataframeanalysisanalyzedfields.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DataframeAnalysisAnalyzedFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L238-L244 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L238-L244 type DataframeAnalysisAnalyzedFields struct { // Excludes An array of strings that defines the fields that will be included in the // analysis. diff --git a/typedapi/types/dataframeanalysisclassification.go b/typedapi/types/dataframeanalysisclassification.go index fe501e8b1a..8b3374d1ec 100644 --- a/typedapi/types/dataframeanalysisclassification.go +++ b/typedapi/types/dataframeanalysisclassification.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalysisClassification type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L227-L236 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L227-L236 type DataframeAnalysisClassification struct { // Alpha Advanced configuration option. 
Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss @@ -173,7 +173,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { switch t { case "alpha": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -213,7 +213,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { s.DependentVariable = o case "downsample_factor": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -229,7 +229,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { } case "early_stopping_enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -243,7 +243,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { } case "eta": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -259,7 +259,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { } case "eta_growth_rate_per_tree": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -275,7 +275,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { } case "feature_bag_fraction": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -296,7 +296,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { } case "gamma": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -312,7 +312,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { } case "lambda": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -329,7 +329,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case "max_optimization_rounds_per_hyperparameter": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -345,7 +345,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case "max_trees", "maximum_number_trees": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -361,7 +361,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case "num_top_classes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -377,7 +377,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case "num_top_feature_importance_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -397,7 +397,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { } case "randomize_seed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -414,7 +414,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { case "soft_tree_depth_limit": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -429,7 +429,7 @@ func (s *DataframeAnalysisClassification) UnmarshalJSON(data []byte) error { } case "soft_tree_depth_tolerance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeanalysiscontainer.go b/typedapi/types/dataframeanalysiscontainer.go index d8267ab735..0d0d735d71 100644 --- 
a/typedapi/types/dataframeanalysiscontainer.go +++ b/typedapi/types/dataframeanalysiscontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DataframeAnalysisContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L84-L101 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L84-L101 type DataframeAnalysisContainer struct { // Classification The configuration information necessary to perform classification. Classification *DataframeAnalysisClassification `json:"classification,omitempty"` diff --git a/typedapi/types/dataframeanalysisfeatureprocessor.go b/typedapi/types/dataframeanalysisfeatureprocessor.go index 2c674e9676..801439e112 100644 --- a/typedapi/types/dataframeanalysisfeatureprocessor.go +++ b/typedapi/types/dataframeanalysisfeatureprocessor.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DataframeAnalysisFeatureProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L246-L258 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L246-L258 type DataframeAnalysisFeatureProcessor struct { // FrequencyEncoding The configuration information necessary to perform frequency encoding. FrequencyEncoding *DataframeAnalysisFeatureProcessorFrequencyEncoding `json:"frequency_encoding,omitempty"` diff --git a/typedapi/types/dataframeanalysisfeatureprocessorfrequencyencoding.go b/typedapi/types/dataframeanalysisfeatureprocessorfrequencyencoding.go index f9604dd0c0..02c0b1810d 100644 --- a/typedapi/types/dataframeanalysisfeatureprocessorfrequencyencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessorfrequencyencoding.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DataframeAnalysisFeatureProcessorFrequencyEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L260-L267 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L260-L267 type DataframeAnalysisFeatureProcessorFrequencyEncoding struct { // FeatureName The resulting feature name. 
FeatureName string `json:"feature_name"` diff --git a/typedapi/types/dataframeanalysisfeatureprocessormultiencoding.go b/typedapi/types/dataframeanalysisfeatureprocessormultiencoding.go index 280eb9498d..9b1f38bd55 100644 --- a/typedapi/types/dataframeanalysisfeatureprocessormultiencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessormultiencoding.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DataframeAnalysisFeatureProcessorMultiEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L269-L272 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L269-L272 type DataframeAnalysisFeatureProcessorMultiEncoding struct { // Processors The ordered array of custom processors to execute. Must be more than 1. Processors []int `json:"processors"` diff --git a/typedapi/types/dataframeanalysisfeatureprocessorngramencoding.go b/typedapi/types/dataframeanalysisfeatureprocessorngramencoding.go index 34efdac0f2..b88cba845d 100644 --- a/typedapi/types/dataframeanalysisfeatureprocessorngramencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessorngramencoding.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalysisFeatureProcessorNGramEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L274-L286 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L274-L286 type DataframeAnalysisFeatureProcessorNGramEncoding struct { Custom *bool `json:"custom,omitempty"` // FeaturePrefix The feature name prefix. Defaults to ngram__. @@ -65,7 +65,7 @@ func (s *DataframeAnalysisFeatureProcessorNGramEncoding) UnmarshalJSON(data []by switch t { case "custom": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -97,7 +97,7 @@ func (s *DataframeAnalysisFeatureProcessorNGramEncoding) UnmarshalJSON(data []by case "length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +118,7 @@ func (s *DataframeAnalysisFeatureProcessorNGramEncoding) UnmarshalJSON(data []by case "start": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeanalysisfeatureprocessoronehotencoding.go b/typedapi/types/dataframeanalysisfeatureprocessoronehotencoding.go index fece9ffeca..6434f18890 100644 --- a/typedapi/types/dataframeanalysisfeatureprocessoronehotencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessoronehotencoding.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalysisFeatureProcessorOneHotEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L288-L293 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L288-L293 type DataframeAnalysisFeatureProcessorOneHotEncoding struct { // Field The name of the field to encode. Field string `json:"field"` diff --git a/typedapi/types/dataframeanalysisfeatureprocessortargetmeanencoding.go b/typedapi/types/dataframeanalysisfeatureprocessortargetmeanencoding.go index eb1186efd0..e38d31e50a 100644 --- a/typedapi/types/dataframeanalysisfeatureprocessortargetmeanencoding.go +++ b/typedapi/types/dataframeanalysisfeatureprocessortargetmeanencoding.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalysisFeatureProcessorTargetMeanEncoding type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L295-L304 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L295-L304 type DataframeAnalysisFeatureProcessorTargetMeanEncoding struct { // DefaultValue The default value if field value is not found in the target_map. DefaultValue int `json:"default_value"` @@ -60,7 +60,7 @@ func (s *DataframeAnalysisFeatureProcessorTargetMeanEncoding) UnmarshalJSON(data case "default_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeanalysisoutlierdetection.go b/typedapi/types/dataframeanalysisoutlierdetection.go index ab5e175a62..851da06a8e 100644 --- a/typedapi/types/dataframeanalysisoutlierdetection.go +++ b/typedapi/types/dataframeanalysisoutlierdetection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalysisOutlierDetection type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L103-L132 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L103-L132 type DataframeAnalysisOutlierDetection struct { // ComputeFeatureInfluence Specifies whether the feature influence calculation is enabled. 
ComputeFeatureInfluence *bool `json:"compute_feature_influence,omitempty"` @@ -75,7 +75,7 @@ func (s *DataframeAnalysisOutlierDetection) UnmarshalJSON(data []byte) error { switch t { case "compute_feature_influence": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *DataframeAnalysisOutlierDetection) UnmarshalJSON(data []byte) error { } case "feature_influence_threshold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +118,7 @@ func (s *DataframeAnalysisOutlierDetection) UnmarshalJSON(data []byte) error { case "n_neighbors": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -133,7 +133,7 @@ func (s *DataframeAnalysisOutlierDetection) UnmarshalJSON(data []byte) error { } case "outlier_fraction": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -149,7 +149,7 @@ func (s *DataframeAnalysisOutlierDetection) UnmarshalJSON(data []byte) error { } case "standardization_enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeanalysisregression.go b/typedapi/types/dataframeanalysisregression.go index 5a69ffd0e2..b788d57013 100644 --- a/typedapi/types/dataframeanalysisregression.go +++ b/typedapi/types/dataframeanalysisregression.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalysisRegression type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L215-L225 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L215-L225 type DataframeAnalysisRegression struct { // Alpha Advanced configuration option. 
Machine learning uses loss guided tree // growing, which means that the decision trees grow where the regularized loss @@ -170,7 +170,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { switch t { case "alpha": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -198,7 +198,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { s.DependentVariable = o case "downsample_factor": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -214,7 +214,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { } case "early_stopping_enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -228,7 +228,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { } case "eta": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -244,7 +244,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { } case "eta_growth_rate_per_tree": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -260,7 +260,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { } case "feature_bag_fraction": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -281,7 +281,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { } case "gamma": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -297,7 +297,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { } case "lambda": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -325,7 +325,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { s.LossFunction = &o case "loss_function_parameter": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -342,7 +342,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case "max_optimization_rounds_per_hyperparameter": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -358,7 +358,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case "max_trees", "maximum_number_trees": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -374,7 +374,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case "num_top_feature_importance_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -394,7 +394,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { } case "randomize_seed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -411,7 +411,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { case "soft_tree_depth_limit": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -426,7 +426,7 @@ func (s *DataframeAnalysisRegression) UnmarshalJSON(data []byte) error { } case "soft_tree_depth_tolerance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeanalytics.go b/typedapi/types/dataframeanalytics.go index b8679e5dd6..295c7498be 100644 --- a/typedapi/types/dataframeanalytics.go +++ 
b/typedapi/types/dataframeanalytics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DataframeAnalytics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L324-L344 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L324-L344 type DataframeAnalytics struct { // AnalysisStats An object containing information about the analysis job. AnalysisStats *DataframeAnalyticsStatsContainer `json:"analysis_stats,omitempty"` diff --git a/typedapi/types/dataframeanalyticsauthorization.go b/typedapi/types/dataframeanalyticsauthorization.go index 778b3f6bee..163bfbb276 100644 --- a/typedapi/types/dataframeanalyticsauthorization.go +++ b/typedapi/types/dataframeanalyticsauthorization.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalyticsAuthorization type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Authorization.ts#L45-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Authorization.ts#L45-L57 type DataframeAnalyticsAuthorization struct { // ApiKey If an API key was used for the most recent update to the job, its name and // identifier are listed in the response. diff --git a/typedapi/types/dataframeanalyticsdestination.go b/typedapi/types/dataframeanalyticsdestination.go index 95c4da3c94..cfb8b8ec4f 100644 --- a/typedapi/types/dataframeanalyticsdestination.go +++ b/typedapi/types/dataframeanalyticsdestination.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DataframeAnalyticsDestination type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L77-L82 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L77-L82 type DataframeAnalyticsDestination struct { // Index Defines the destination index to store the results of the data frame // analytics job. diff --git a/typedapi/types/dataframeanalyticsfieldselection.go b/typedapi/types/dataframeanalyticsfieldselection.go index 738a904aaf..f425ae7d37 100644 --- a/typedapi/types/dataframeanalyticsfieldselection.go +++ b/typedapi/types/dataframeanalyticsfieldselection.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalyticsFieldSelection type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L55-L68 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L55-L68 type DataframeAnalyticsFieldSelection struct { // FeatureType The feature type of this field for the analysis. May be categorical or // numerical. @@ -76,7 +76,7 @@ func (s *DataframeAnalyticsFieldSelection) UnmarshalJSON(data []byte) error { s.FeatureType = &o case "is_included": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *DataframeAnalyticsFieldSelection) UnmarshalJSON(data []byte) error { } case "is_required": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeanalyticsmemoryestimation.go b/typedapi/types/dataframeanalyticsmemoryestimation.go index 0889975a9d..8e0764668c 100644 --- a/typedapi/types/dataframeanalyticsmemoryestimation.go +++ b/typedapi/types/dataframeanalyticsmemoryestimation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalyticsMemoryEstimation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L70-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L70-L75 type DataframeAnalyticsMemoryEstimation struct { // ExpectedMemoryWithDisk Estimated memory usage under the assumption that overflowing to disk is // allowed during data frame analytics. expected_memory_with_disk is usually diff --git a/typedapi/types/dataframeanalyticsrecord.go b/typedapi/types/dataframeanalyticsrecord.go index 543a208fc3..f951057295 100644 --- a/typedapi/types/dataframeanalyticsrecord.go +++ b/typedapi/types/dataframeanalyticsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataFrameAnalyticsRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/ml_data_frame_analytics/types.ts#L22-L102 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/ml_data_frame_analytics/types.ts#L22-L102 type DataFrameAnalyticsRecord struct { // AssignmentExplanation Messages related to the selection of a node. AssignmentExplanation *string `json:"assignment_explanation,omitempty"` diff --git a/typedapi/types/dataframeanalyticssource.go b/typedapi/types/dataframeanalyticssource.go index a1f669f06f..63ea055642 100644 --- a/typedapi/types/dataframeanalyticssource.go +++ b/typedapi/types/dataframeanalyticssource.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DataframeAnalyticsSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L39-L53 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L39-L53 type DataframeAnalyticsSource struct { // Index Index or indices on which to perform the analysis. It can be a single index // or index pattern as well as an array of indices or patterns. NOTE: If your diff --git a/typedapi/types/dataframeanalyticsstatscontainer.go b/typedapi/types/dataframeanalyticsstatscontainer.go index 3cf4321625..baab6f4e83 100644 --- a/typedapi/types/dataframeanalyticsstatscontainer.go +++ b/typedapi/types/dataframeanalyticsstatscontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DataframeAnalyticsStatsContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L373-L381 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L373-L381 type DataframeAnalyticsStatsContainer struct { // ClassificationStats An object containing information about the classification analysis job. ClassificationStats *DataframeAnalyticsStatsHyperparameters `json:"classification_stats,omitempty"` diff --git a/typedapi/types/dataframeanalyticsstatsdatacounts.go b/typedapi/types/dataframeanalyticsstatsdatacounts.go index d86725eef4..e999389829 100644 --- a/typedapi/types/dataframeanalyticsstatsdatacounts.go +++ b/typedapi/types/dataframeanalyticsstatsdatacounts.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalyticsStatsDataCounts type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L364-L371 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L364-L371 type DataframeAnalyticsStatsDataCounts struct { // SkippedDocsCount The number of documents that are skipped during the analysis because they // contained values that are not supported by the analysis. For example, outlier @@ -63,7 +63,7 @@ func (s *DataframeAnalyticsStatsDataCounts) UnmarshalJSON(data []byte) error { case "skipped_docs_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -79,7 +79,7 @@ func (s *DataframeAnalyticsStatsDataCounts) UnmarshalJSON(data []byte) error { case "test_docs_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *DataframeAnalyticsStatsDataCounts) UnmarshalJSON(data []byte) error { case "training_docs_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeanalyticsstatshyperparameters.go b/typedapi/types/dataframeanalyticsstatshyperparameters.go index 38f1e2befd..afff0a7b53 100644 --- a/typedapi/types/dataframeanalyticsstatshyperparameters.go +++ b/typedapi/types/dataframeanalyticsstatshyperparameters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalyticsStatsHyperparameters type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L383-L402 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L383-L402 type DataframeAnalyticsStatsHyperparameters struct { // Hyperparameters An object containing the parameters of the classification analysis job. Hyperparameters Hyperparameters `json:"hyperparameters"` @@ -68,7 +68,7 @@ func (s *DataframeAnalyticsStatsHyperparameters) UnmarshalJSON(data []byte) erro case "iteration": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeanalyticsstatsmemoryusage.go b/typedapi/types/dataframeanalyticsstatsmemoryusage.go index 4598a20c57..0cb394683b 100644 --- a/typedapi/types/dataframeanalyticsstatsmemoryusage.go +++ b/typedapi/types/dataframeanalyticsstatsmemoryusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalyticsStatsMemoryUsage type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L353-L362 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L353-L362 type DataframeAnalyticsStatsMemoryUsage struct { // MemoryReestimateBytes This value is present when the status is hard_limit and it is a new estimate // of how much memory the job needs. @@ -60,7 +60,7 @@ func (s *DataframeAnalyticsStatsMemoryUsage) UnmarshalJSON(data []byte) error { switch t { case "memory_reestimate_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *DataframeAnalyticsStatsMemoryUsage) UnmarshalJSON(data []byte) error { } case "peak_usage_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeanalyticsstatsoutlierdetection.go b/typedapi/types/dataframeanalyticsstatsoutlierdetection.go index dfdc8d8760..309edcc28f 100644 --- a/typedapi/types/dataframeanalyticsstatsoutlierdetection.go +++ b/typedapi/types/dataframeanalyticsstatsoutlierdetection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DataframeAnalyticsStatsOutlierDetection type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L404-L417 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L404-L417 type DataframeAnalyticsStatsOutlierDetection struct { // Parameters The list of job parameters specified by the user or determined by algorithmic // heuristics. diff --git a/typedapi/types/dataframeanalyticsstatsprogress.go b/typedapi/types/dataframeanalyticsstatsprogress.go index eb99c84823..dddfa1668d 100644 --- a/typedapi/types/dataframeanalyticsstatsprogress.go +++ b/typedapi/types/dataframeanalyticsstatsprogress.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalyticsStatsProgress type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L346-L351 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L346-L351 type DataframeAnalyticsStatsProgress struct { // Phase Defines the phase of the data frame analytics job. 
Phase string `json:"phase"` @@ -69,7 +69,7 @@ func (s *DataframeAnalyticsStatsProgress) UnmarshalJSON(data []byte) error { case "progress_percent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeanalyticssummary.go b/typedapi/types/dataframeanalyticssummary.go index 76a8a8cc97..e83ec5663d 100644 --- a/typedapi/types/dataframeanalyticssummary.go +++ b/typedapi/types/dataframeanalyticssummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeAnalyticsSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L306-L322 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L306-L322 type DataframeAnalyticsSummary struct { AllowLazyStart *bool `json:"allow_lazy_start,omitempty"` Analysis DataframeAnalysisContainer `json:"analysis"` @@ -66,7 +66,7 @@ func (s *DataframeAnalyticsSummary) UnmarshalJSON(data []byte) error { switch t { case "allow_lazy_start": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -123,7 +123,7 @@ func (s *DataframeAnalyticsSummary) UnmarshalJSON(data []byte) error { case "max_num_threads": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeclassificationsummary.go b/typedapi/types/dataframeclassificationsummary.go index 22757bb5b3..6f0178872c 100644 --- a/typedapi/types/dataframeclassificationsummary.go +++ b/typedapi/types/dataframeclassificationsummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DataframeClassificationSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L44-L66 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L44-L66 type DataframeClassificationSummary struct { // Accuracy Accuracy of predictions (per-class and overall). Accuracy *DataframeClassificationSummaryAccuracy `json:"accuracy,omitempty"` diff --git a/typedapi/types/dataframeclassificationsummaryaccuracy.go b/typedapi/types/dataframeclassificationsummaryaccuracy.go index ac7ea64b17..c7721d0737 100644 --- a/typedapi/types/dataframeclassificationsummaryaccuracy.go +++ b/typedapi/types/dataframeclassificationsummaryaccuracy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeClassificationSummaryAccuracy type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L111-L114 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L111-L114 type DataframeClassificationSummaryAccuracy struct { Classes []DataframeEvaluationClass `json:"classes"` OverallAccuracy Float64 `json:"overall_accuracy"` @@ -58,7 +58,7 @@ func (s *DataframeClassificationSummaryAccuracy) UnmarshalJSON(data []byte) erro } case "overall_accuracy": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeclassificationsummarymulticlassconfusionmatrix.go b/typedapi/types/dataframeclassificationsummarymulticlassconfusionmatrix.go index 723d45327c..ae41218011 100644 --- a/typedapi/types/dataframeclassificationsummarymulticlassconfusionmatrix.go +++ b/typedapi/types/dataframeclassificationsummarymulticlassconfusionmatrix.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeClassificationSummaryMulticlassConfusionMatrix type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L120-L123 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L120-L123 type DataframeClassificationSummaryMulticlassConfusionMatrix struct { ConfusionMatrix []ConfusionMatrixItem `json:"confusion_matrix"` OtherActualClassCount int `json:"other_actual_class_count"` @@ -59,7 +59,7 @@ func (s *DataframeClassificationSummaryMulticlassConfusionMatrix) UnmarshalJSON( case "other_actual_class_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeclassificationsummaryprecision.go b/typedapi/types/dataframeclassificationsummaryprecision.go index ebb667bf30..a7c70c8ba9 100644 --- a/typedapi/types/dataframeclassificationsummaryprecision.go +++ b/typedapi/types/dataframeclassificationsummaryprecision.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeClassificationSummaryPrecision type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L101-L104 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L101-L104 type DataframeClassificationSummaryPrecision struct { AvgPrecision Float64 `json:"avg_precision"` Classes []DataframeEvaluationClass `json:"classes"` @@ -53,7 +53,7 @@ func (s *DataframeClassificationSummaryPrecision) UnmarshalJSON(data []byte) err switch t { case "avg_precision": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeclassificationsummaryrecall.go b/typedapi/types/dataframeclassificationsummaryrecall.go index 7b4cadb451..664fef4d8f 100644 --- a/typedapi/types/dataframeclassificationsummaryrecall.go +++ b/typedapi/types/dataframeclassificationsummaryrecall.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeClassificationSummaryRecall type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L106-L109 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L106-L109 type DataframeClassificationSummaryRecall struct { AvgRecall Float64 `json:"avg_recall"` Classes []DataframeEvaluationClass `json:"classes"` @@ -53,7 +53,7 @@ func (s *DataframeClassificationSummaryRecall) UnmarshalJSON(data []byte) error switch t { case "avg_recall": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeevaluationclass.go b/typedapi/types/dataframeevaluationclass.go index 938015967b..ebdf9b48ea 100644 --- a/typedapi/types/dataframeevaluationclass.go +++ b/typedapi/types/dataframeevaluationclass.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeEvaluationClass type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L116-L118 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L116-L118 type DataframeEvaluationClass struct { ClassName string `json:"class_name"` Value Float64 `json:"value"` @@ -58,7 +58,7 @@ func (s *DataframeEvaluationClass) UnmarshalJSON(data []byte) error { } case "value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeevaluationclassification.go b/typedapi/types/dataframeevaluationclassification.go index 00915e7231..7155e90617 100644 --- a/typedapi/types/dataframeevaluationclassification.go +++ b/typedapi/types/dataframeevaluationclassification.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DataframeEvaluationClassification type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeEvaluation.ts#L35-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeEvaluation.ts#L35-L44 type DataframeEvaluationClassification struct { // ActualField The field of the index which contains the ground truth. The data type of this // field can be boolean or integer. If the data type is integer, the value has diff --git a/typedapi/types/dataframeevaluationclassificationmetrics.go b/typedapi/types/dataframeevaluationclassificationmetrics.go index 0faad4e7a8..9226e81b48 100644 --- a/typedapi/types/dataframeevaluationclassificationmetrics.go +++ b/typedapi/types/dataframeevaluationclassificationmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // DataframeEvaluationClassificationMetrics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeEvaluation.ts#L73-L78 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeEvaluation.ts#L73-L78 type DataframeEvaluationClassificationMetrics struct { // Accuracy Accuracy of predictions (per-class and overall). Accuracy map[string]json.RawMessage `json:"accuracy,omitempty"` diff --git a/typedapi/types/dataframeevaluationclassificationmetricsaucroc.go b/typedapi/types/dataframeevaluationclassificationmetricsaucroc.go index 043d765284..8fe89f901c 100644 --- a/typedapi/types/dataframeevaluationclassificationmetricsaucroc.go +++ b/typedapi/types/dataframeevaluationclassificationmetricsaucroc.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeEvaluationClassificationMetricsAucRoc type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeEvaluation.ts#L85-L90 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeEvaluation.ts#L85-L90 type DataframeEvaluationClassificationMetricsAucRoc struct { // ClassName Name of the only class that is treated as positive during AUC ROC // calculation. Other classes are treated as negative ("one-vs-all" strategy). @@ -64,7 +64,7 @@ func (s *DataframeEvaluationClassificationMetricsAucRoc) UnmarshalJSON(data []by } case "include_curve": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeevaluationcontainer.go b/typedapi/types/dataframeevaluationcontainer.go index 9d032953c7..f15b1e2427 100644 --- a/typedapi/types/dataframeevaluationcontainer.go +++ b/typedapi/types/dataframeevaluationcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DataframeEvaluationContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeEvaluation.ts#L25-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeEvaluation.ts#L25-L33 type DataframeEvaluationContainer struct { // Classification Classification evaluation evaluates the results of a classification analysis // which outputs a prediction that identifies to which of the classes each diff --git a/typedapi/types/dataframeevaluationmetrics.go b/typedapi/types/dataframeevaluationmetrics.go index 84637fa9fd..4f04a13732 100644 --- a/typedapi/types/dataframeevaluationmetrics.go +++ b/typedapi/types/dataframeevaluationmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // DataframeEvaluationMetrics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeEvaluation.ts#L64-L71 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeEvaluation.ts#L64-L71 type DataframeEvaluationMetrics struct { // AucRoc The AUC ROC (area under the curve of the receiver operating characteristic) // score and optionally the curve. 
It is calculated for a specific class diff --git a/typedapi/types/dataframeevaluationoutlierdetection.go b/typedapi/types/dataframeevaluationoutlierdetection.go index 6c43b9ac43..dbd4726503 100644 --- a/typedapi/types/dataframeevaluationoutlierdetection.go +++ b/typedapi/types/dataframeevaluationoutlierdetection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DataframeEvaluationOutlierDetection type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeEvaluation.ts#L46-L53 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeEvaluation.ts#L46-L53 type DataframeEvaluationOutlierDetection struct { // ActualField The field of the index which contains the ground truth. The data type of this // field can be boolean or integer. If the data type is integer, the value has diff --git a/typedapi/types/dataframeevaluationoutlierdetectionmetrics.go b/typedapi/types/dataframeevaluationoutlierdetectionmetrics.go index ca30b090ae..01986e1519 100644 --- a/typedapi/types/dataframeevaluationoutlierdetectionmetrics.go +++ b/typedapi/types/dataframeevaluationoutlierdetectionmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // DataframeEvaluationOutlierDetectionMetrics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeEvaluation.ts#L80-L83 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeEvaluation.ts#L80-L83 type DataframeEvaluationOutlierDetectionMetrics struct { // AucRoc The AUC ROC (area under the curve of the receiver operating characteristic) // score and optionally the curve. It is calculated for a specific class diff --git a/typedapi/types/dataframeevaluationregression.go b/typedapi/types/dataframeevaluationregression.go index 250ebd5f35..67cd85809c 100644 --- a/typedapi/types/dataframeevaluationregression.go +++ b/typedapi/types/dataframeevaluationregression.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DataframeEvaluationRegression type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeEvaluation.ts#L55-L62 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeEvaluation.ts#L55-L62 type DataframeEvaluationRegression struct { // ActualField The field of the index which contains the ground truth. The data type of this // field must be numerical. diff --git a/typedapi/types/dataframeevaluationregressionmetrics.go b/typedapi/types/dataframeevaluationregressionmetrics.go index 06d06acc70..c54c3d0e30 100644 --- a/typedapi/types/dataframeevaluationregressionmetrics.go +++ b/typedapi/types/dataframeevaluationregressionmetrics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // DataframeEvaluationRegressionMetrics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeEvaluation.ts#L92-L110 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeEvaluation.ts#L92-L110 type DataframeEvaluationRegressionMetrics struct { // Huber Pseudo Huber loss function. Huber *DataframeEvaluationRegressionMetricsHuber `json:"huber,omitempty"` diff --git a/typedapi/types/dataframeevaluationregressionmetricshuber.go b/typedapi/types/dataframeevaluationregressionmetricshuber.go index 412d8e23c0..b1ae4242cb 100644 --- a/typedapi/types/dataframeevaluationregressionmetricshuber.go +++ b/typedapi/types/dataframeevaluationregressionmetricshuber.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeEvaluationRegressionMetricsHuber type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeEvaluation.ts#L117-L120 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeEvaluation.ts#L117-L120 type DataframeEvaluationRegressionMetricsHuber struct { // Delta Approximates 1/2 (prediction - actual)2 for values much less than delta and // approximates a straight line with slope delta for values much larger than @@ -55,7 +55,7 @@ func (s *DataframeEvaluationRegressionMetricsHuber) UnmarshalJSON(data []byte) e switch t { case "delta": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeevaluationregressionmetricsmsle.go b/typedapi/types/dataframeevaluationregressionmetricsmsle.go index c18b0850c1..8ec317273b 100644 --- a/typedapi/types/dataframeevaluationregressionmetricsmsle.go +++ b/typedapi/types/dataframeevaluationregressionmetricsmsle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
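The generated doc comment in the Huber hunk above describes the delta parameter only in words ("approximates 1/2 (prediction - actual)^2 for values much less than delta and approximates a straight line with slope delta for values much larger than delta"). As a quick illustration only, here is a hand-written helper, written under the assumption that the metric is the standard pseudo-Huber loss named elsewhere in this diff ("Pseudo Huber loss function"); it is not part of the go-elasticsearch client, it just makes the two regimes concrete:

package main

import (
	"fmt"
	"math"
)

// pseudoHuber is an illustrative stand-in for the loss whose delta parameter
// the generated comment describes: roughly 0.5*(prediction-actual)^2 when the
// residual is much smaller than delta, and roughly a straight line with slope
// delta when it is much larger.
func pseudoHuber(prediction, actual, delta float64) float64 {
	a := prediction - actual
	return delta * delta * (math.Sqrt(1+(a/delta)*(a/delta)) - 1)
}

func main() {
	fmt.Println(pseudoHuber(10.1, 10.0, 1.0))  // ~0.005, i.e. ~0.5*(0.1)^2 for a small residual
	fmt.Println(pseudoHuber(110.0, 10.0, 1.0)) // ~99, i.e. ~delta*|residual| for a large residual
}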
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeEvaluationRegressionMetricsMsle type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeEvaluation.ts#L112-L115 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeEvaluation.ts#L112-L115 type DataframeEvaluationRegressionMetricsMsle struct { // Offset Defines the transition point at which you switch from minimizing quadratic // error to minimizing quadratic log error. Defaults to 1. @@ -54,7 +54,7 @@ func (s *DataframeEvaluationRegressionMetricsMsle) UnmarshalJSON(data []byte) er switch t { case "offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeevaluationsummaryaucroc.go b/typedapi/types/dataframeevaluationsummaryaucroc.go index 582631f829..f0cc9ea1b8 100644 --- a/typedapi/types/dataframeevaluationsummaryaucroc.go +++ b/typedapi/types/dataframeevaluationsummaryaucroc.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeEvaluationSummaryAucRoc type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L91-L93 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L91-L93 type DataframeEvaluationSummaryAucRoc struct { Curve []DataframeEvaluationSummaryAucRocCurveItem `json:"curve,omitempty"` Value Float64 `json:"value"` @@ -58,7 +58,7 @@ func (s *DataframeEvaluationSummaryAucRoc) UnmarshalJSON(data []byte) error { } case "value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeevaluationsummaryaucroccurveitem.go b/typedapi/types/dataframeevaluationsummaryaucroccurveitem.go index 0ac5b57691..8eca51fea9 100644 --- a/typedapi/types/dataframeevaluationsummaryaucroccurveitem.go +++ b/typedapi/types/dataframeevaluationsummaryaucroccurveitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeEvaluationSummaryAucRocCurveItem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L95-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L95-L99 type DataframeEvaluationSummaryAucRocCurveItem struct { Fpr Float64 `json:"fpr"` Threshold Float64 `json:"threshold"` @@ -54,7 +54,7 @@ func (s *DataframeEvaluationSummaryAucRocCurveItem) UnmarshalJSON(data []byte) e switch t { case "fpr": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *DataframeEvaluationSummaryAucRocCurveItem) UnmarshalJSON(data []byte) e } case "threshold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -86,7 +86,7 @@ func (s *DataframeEvaluationSummaryAucRocCurveItem) UnmarshalJSON(data []byte) e } case "tpr": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeevaluationvalue.go b/typedapi/types/dataframeevaluationvalue.go index 2548e0c2f1..0a5b37fa6d 100644 --- a/typedapi/types/dataframeevaluationvalue.go +++ b/typedapi/types/dataframeevaluationvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframeEvaluationValue type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L87-L89 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L87-L89 type DataframeEvaluationValue struct { Value Float64 `json:"value"` } @@ -52,7 +52,7 @@ func (s *DataframeEvaluationValue) UnmarshalJSON(data []byte) error { switch t { case "value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeoutlierdetectionsummary.go b/typedapi/types/dataframeoutlierdetectionsummary.go index c9b6b5dd2a..d38ceb8c93 100644 --- a/typedapi/types/dataframeoutlierdetectionsummary.go +++ b/typedapi/types/dataframeoutlierdetectionsummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DataframeOutlierDetectionSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L24-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L24-L42 type DataframeOutlierDetectionSummary struct { // AucRoc The AUC ROC (area under the curve of the receiver operating characteristic) // score and optionally the curve. 
diff --git a/typedapi/types/dataframepreviewconfig.go b/typedapi/types/dataframepreviewconfig.go index 576b188ff0..c4382614d6 100644 --- a/typedapi/types/dataframepreviewconfig.go +++ b/typedapi/types/dataframepreviewconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataframePreviewConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/preview_data_frame_analytics/types.ts#L27-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/preview_data_frame_analytics/types.ts#L27-L33 type DataframePreviewConfig struct { Analysis DataframeAnalysisContainer `json:"analysis"` AnalyzedFields *DataframeAnalysisAnalyzedFields `json:"analyzed_fields,omitempty"` @@ -67,7 +67,7 @@ func (s *DataframePreviewConfig) UnmarshalJSON(data []byte) error { case "max_num_threads": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dataframeregressionsummary.go b/typedapi/types/dataframeregressionsummary.go index b878a45035..c629f20c2c 100644 --- a/typedapi/types/dataframeregressionsummary.go +++ b/typedapi/types/dataframeregressionsummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DataframeRegressionSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/evaluate_data_frame/types.ts#L68-L85 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/evaluate_data_frame/types.ts#L68-L85 type DataframeRegressionSummary struct { // Huber Pseudo Huber loss function. Huber *DataframeEvaluationValue `json:"huber,omitempty"` diff --git a/typedapi/types/datapathstats.go b/typedapi/types/datapathstats.go index f81d238817..40f30540e8 100644 --- a/typedapi/types/datapathstats.go +++ b/typedapi/types/datapathstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataPathStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L550-L594 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L550-L594 type DataPathStats struct { // Available Total amount of disk space available to this Java virtual machine on this // file store. 
@@ -90,7 +90,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { s.Available = &o case "available_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -129,7 +129,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { s.DiskReadSize = &o case "disk_read_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -144,7 +144,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { } case "disk_reads": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -171,7 +171,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { s.DiskWriteSize = &o case "disk_write_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -186,7 +186,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { } case "disk_writes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -213,7 +213,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { s.Free = &o case "free_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -264,7 +264,7 @@ func (s *DataPathStats) UnmarshalJSON(data []byte) error { s.Total = &o case "total_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datastream.go b/typedapi/types/datastream.go index cfd5dbcc19..e1d3a690ed 100644 --- a/typedapi/types/datastream.go +++ b/typedapi/types/datastream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // DataStream type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/DataStream.ts#L39-L112 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/DataStream.ts#L39-L112 type DataStream struct { // AllowCustomRouting If `true`, the data stream allows custom routing on write request. 
AllowCustomRouting *bool `json:"allow_custom_routing,omitempty"` @@ -106,7 +106,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { switch t { case "allow_custom_routing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -121,7 +121,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { case "generation": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -136,7 +136,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { } case "hidden": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -180,7 +180,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { } case "prefer_ilm": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -194,7 +194,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { } case "replicated": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -213,7 +213,7 @@ func (s *DataStream) UnmarshalJSON(data []byte) error { } case "system": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datastreamindex.go b/typedapi/types/datastreamindex.go index 6a17c6d746..fc856112da 100644 --- a/typedapi/types/datastreamindex.go +++ b/typedapi/types/datastreamindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DataStreamIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/DataStream.ts#L121-L142 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/DataStream.ts#L121-L142 type DataStreamIndex struct { // IlmPolicy Name of the current ILM lifecycle policy configured for this backing index. IlmPolicy *string `json:"ilm_policy,omitempty"` @@ -84,7 +84,7 @@ func (s *DataStreamIndex) UnmarshalJSON(data []byte) error { } case "prefer_ilm": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datastreamlifecycle.go b/typedapi/types/datastreamlifecycle.go index 0f17bbe6aa..4127ae96e7 100644 --- a/typedapi/types/datastreamlifecycle.go +++ b/typedapi/types/datastreamlifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DataStreamLifecycle type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/DataStreamLifecycle.ts#L25-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/DataStreamLifecycle.ts#L25-L31 type DataStreamLifecycle struct { DataRetention Duration `json:"data_retention,omitempty"` Downsampling *DataStreamLifecycleDownsampling `json:"downsampling,omitempty"` diff --git a/typedapi/types/datastreamlifecycledownsampling.go b/typedapi/types/datastreamlifecycledownsampling.go index ce5cf47617..4a8c9cb6b4 100644 --- a/typedapi/types/datastreamlifecycledownsampling.go +++ b/typedapi/types/datastreamlifecycledownsampling.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DataStreamLifecycleDownsampling type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/DataStreamLifecycleDownsampling.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/DataStreamLifecycleDownsampling.ts#L22-L27 type DataStreamLifecycleDownsampling struct { // Rounds The list of downsampling rounds to execute as part of this downsampling // configuration diff --git a/typedapi/types/datastreamlifecycleexplain.go b/typedapi/types/datastreamlifecycleexplain.go index 6dcc144c87..d0ad224675 100644 --- a/typedapi/types/datastreamlifecycleexplain.go +++ b/typedapi/types/datastreamlifecycleexplain.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataStreamLifecycleExplain type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/explain_data_lifecycle/IndicesExplainDataLifecycleResponse.ts#L31-L41 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/explain_data_lifecycle/IndicesExplainDataLifecycleResponse.ts#L31-L41 type DataStreamLifecycleExplain struct { Error *string `json:"error,omitempty"` GenerationTime Duration `json:"generation_time,omitempty"` @@ -92,7 +92,7 @@ func (s *DataStreamLifecycleExplain) UnmarshalJSON(data []byte) error { } case "managed_by_lifecycle": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datastreamlifecyclerolloverconditions.go b/typedapi/types/datastreamlifecyclerolloverconditions.go index 0fdf3944a2..22fd3e0ed6 100644 --- a/typedapi/types/datastreamlifecyclerolloverconditions.go +++ b/typedapi/types/datastreamlifecyclerolloverconditions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataStreamLifecycleRolloverConditions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/DataStreamLifecycle.ts#L57-L69 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/DataStreamLifecycle.ts#L57-L69 type DataStreamLifecycleRolloverConditions struct { MaxAge *string `json:"max_age,omitempty"` MaxDocs *int64 `json:"max_docs,omitempty"` @@ -73,7 +73,7 @@ func (s *DataStreamLifecycleRolloverConditions) UnmarshalJSON(data []byte) error s.MaxAge = &o case "max_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *DataStreamLifecycleRolloverConditions) UnmarshalJSON(data []byte) error } case "max_primary_shard_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +118,7 @@ func (s *DataStreamLifecycleRolloverConditions) UnmarshalJSON(data []byte) error } case "min_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -133,7 +133,7 @@ func (s *DataStreamLifecycleRolloverConditions) UnmarshalJSON(data []byte) error } case "min_primary_shard_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datastreamlifecyclewithrollover.go b/typedapi/types/datastreamlifecyclewithrollover.go index 2f6a4bc999..8894530ea9 100644 --- a/typedapi/types/datastreamlifecyclewithrollover.go +++ b/typedapi/types/datastreamlifecyclewithrollover.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DataStreamLifecycleWithRollover type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/DataStreamLifecycle.ts#L33-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/DataStreamLifecycle.ts#L33-L55 type DataStreamLifecycleWithRollover struct { // DataRetention If defined, every document added to this data stream will be stored at least // for this time frame. diff --git a/typedapi/types/datastreamnames.go b/typedapi/types/datastreamnames.go index 306db24516..ef154e7dff 100644 --- a/typedapi/types/datastreamnames.go +++ b/typedapi/types/datastreamnames.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DataStreamNames type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L94-L94 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L94-L94 type DataStreamNames []string diff --git a/typedapi/types/datastreams.go b/typedapi/types/datastreams.go index 4c3347405a..b45ff6547c 100644 --- a/typedapi/types/datastreams.go +++ b/typedapi/types/datastreams.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataStreams type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L81-L84 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L81-L84 type DataStreams struct { Available bool `json:"available"` DataStreams int64 `json:"data_streams"` @@ -55,7 +55,7 @@ func (s *DataStreams) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -69,7 +69,7 @@ func (s *DataStreams) UnmarshalJSON(data []byte) error { } case "data_streams": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,7 +84,7 @@ func (s *DataStreams) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -98,7 +98,7 @@ func (s *DataStreams) UnmarshalJSON(data []byte) error { } case "indices_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datastreamsstatsitem.go b/typedapi/types/datastreamsstatsitem.go index 0239608138..68d3af1752 100644 --- a/typedapi/types/datastreamsstatsitem.go +++ b/typedapi/types/datastreamsstatsitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataStreamsStatsItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/data_streams_stats/IndicesDataStreamsStatsResponse.ts#L45-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/data_streams_stats/IndicesDataStreamsStatsResponse.ts#L45-L65 type DataStreamsStatsItem struct { // BackingIndices Current number of backing indices for the data stream. 
BackingIndices int `json:"backing_indices"` @@ -69,7 +69,7 @@ func (s *DataStreamsStatsItem) UnmarshalJSON(data []byte) error { case "backing_indices": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,7 +99,7 @@ func (s *DataStreamsStatsItem) UnmarshalJSON(data []byte) error { } case "store_size_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datastreamtimestamp.go b/typedapi/types/datastreamtimestamp.go index 151843a9d5..4857398465 100644 --- a/typedapi/types/datastreamtimestamp.go +++ b/typedapi/types/datastreamtimestamp.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataStreamTimestamp type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/TypeMapping.ts#L59-L61 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/TypeMapping.ts#L59-L61 type DataStreamTimestamp struct { Enabled bool `json:"enabled"` } @@ -52,7 +52,7 @@ func (s *DataStreamTimestamp) UnmarshalJSON(data []byte) error { switch t { case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datastreamtimestampfield.go b/typedapi/types/datastreamtimestampfield.go index 9d838b85b6..cd3ebc9688 100644 --- a/typedapi/types/datastreamtimestampfield.go +++ b/typedapi/types/datastreamtimestampfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DataStreamTimestampField type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/DataStream.ts#L114-L119 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/DataStream.ts#L114-L119 type DataStreamTimestampField struct { // Name Name of the timestamp field for the data stream, which must be `@timestamp`. // The `@timestamp` field must be included in every document indexed to the data diff --git a/typedapi/types/datastreamvisibility.go b/typedapi/types/datastreamvisibility.go index 30490be101..af4cd9d81b 100644 --- a/typedapi/types/datastreamvisibility.go +++ b/typedapi/types/datastreamvisibility.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataStreamVisibility type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/DataStream.ts#L144-L146 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/DataStream.ts#L144-L146 type DataStreamVisibility struct { Hidden *bool `json:"hidden,omitempty"` } @@ -52,7 +52,7 @@ func (s *DataStreamVisibility) UnmarshalJSON(data []byte) error { switch t { case "hidden": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datastreamwithlifecycle.go b/typedapi/types/datastreamwithlifecycle.go index 473cd87053..b614eaa61b 100644 --- a/typedapi/types/datastreamwithlifecycle.go +++ b/typedapi/types/datastreamwithlifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DataStreamWithLifecycle type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_data_lifecycle/IndicesGetDataLifecycleResponse.ts#L27-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_data_lifecycle/IndicesGetDataLifecycleResponse.ts#L27-L30 type DataStreamWithLifecycle struct { Lifecycle *DataStreamLifecycle `json:"lifecycle,omitempty"` Name string `json:"name"` diff --git a/typedapi/types/datatierphasestatistics.go b/typedapi/types/datatierphasestatistics.go index 17936a5ea5..a2149af569 100644 --- a/typedapi/types/datatierphasestatistics.go +++ b/typedapi/types/datatierphasestatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataTierPhaseStatistics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L86-L97 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L86-L97 type DataTierPhaseStatistics struct { DocCount int64 `json:"doc_count"` IndexCount int64 `json:"index_count"` @@ -61,7 +61,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { } case "index_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { } case "node_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,7 +106,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { } case "primary_shard_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -121,7 +121,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { } case "primary_shard_size_avg_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -136,7 +136,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { } case "primary_shard_size_mad_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -151,7 +151,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { } case "primary_shard_size_median_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -166,7 +166,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { } case "primary_size_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -181,7 +181,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { } case "total_shard_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -196,7 +196,7 @@ func (s *DataTierPhaseStatistics) UnmarshalJSON(data []byte) error { } case "total_size_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datatiers.go b/typedapi/types/datatiers.go index 5f99c80bc4..1357e5dd7b 100644 --- a/typedapi/types/datatiers.go +++ b/typedapi/types/datatiers.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DataTiers type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L339-L349 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L339-L349 type DataTiers struct { Available bool `json:"available"` DataCold DataTierPhaseStatistics `json:"data_cold"` @@ -58,7 +58,7 @@ func (s *DataTiers) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -97,7 +97,7 @@ func (s *DataTiers) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datedecayfunction.go b/typedapi/types/datedecayfunction.go index 2b1a565fab..a1278dab4a 100644 --- a/typedapi/types/datedecayfunction.go +++ b/typedapi/types/datedecayfunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -29,9 +29,9 @@ import ( // DateDecayFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L186-L188 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L193-L193 type DateDecayFunction struct { - DateDecayFunction map[string]DecayPlacementDateMathDuration `json:"-"` + DecayFunctionBaseDateMathDuration map[string]DecayPlacementDateMathDuration `json:"-"` // MultiValueMode Determines how the distance is calculated when a field used for computing the // decay contains multiple values. MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` @@ -41,7 +41,7 @@ type DateDecayFunction struct { func (s DateDecayFunction) MarshalJSON() ([]byte, error) { type opt DateDecayFunction // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { @@ -53,10 +53,10 @@ func (s DateDecayFunction) MarshalJSON() ([]byte, error) { } // We inline the additional fields from the underlying map - for key, value := range s.DateDecayFunction { + for key, value := range s.DecayFunctionBaseDateMathDuration { tmp[fmt.Sprintf("%s", key)] = value } - delete(tmp, "DateDecayFunction") + delete(tmp, "DecayFunctionBaseDateMathDuration") data, err = json.Marshal(tmp) if err != nil { @@ -69,7 +69,7 @@ func (s DateDecayFunction) MarshalJSON() ([]byte, error) { // NewDateDecayFunction returns a DateDecayFunction. func NewDateDecayFunction() *DateDecayFunction { r := &DateDecayFunction{ - DateDecayFunction: make(map[string]DecayPlacementDateMathDuration, 0), + DecayFunctionBaseDateMathDuration: make(map[string]DecayPlacementDateMathDuration, 0), } return r diff --git a/typedapi/types/datedistancefeaturequery.go b/typedapi/types/datedistancefeaturequery.go index be214f4e9c..821008b8c2 100644 --- a/typedapi/types/datedistancefeaturequery.go +++ b/typedapi/types/datedistancefeaturequery.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DateDistanceFeatureQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L67-L70 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L72-L75 type DateDistanceFeatureQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -78,7 +78,7 @@ func (s *DateDistanceFeatureQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datehistogramaggregate.go b/typedapi/types/datehistogramaggregate.go index e5119a4b8e..9a112bbae4 100644 --- a/typedapi/types/datehistogramaggregate.go +++ b/typedapi/types/datehistogramaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DateHistogramAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L348-L349 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L350-L351 type DateHistogramAggregate struct { Buckets BucketsDateHistogramBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/datehistogramaggregation.go b/typedapi/types/datehistogramaggregation.go index 7c01e808ca..ccc115f7a7 100644 --- a/typedapi/types/datehistogramaggregation.go +++ b/typedapi/types/datehistogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // DateHistogramAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L189-L247 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L191-L249 type DateHistogramAggregation struct { // CalendarInterval Calendar-aware interval. // Can be specified using the unit name, such as `month`, or as a single unit @@ -56,8 +56,7 @@ type DateHistogramAggregation struct { Interval Duration `json:"interval,omitempty"` // Keyed Set to `true` to associate a unique string key with each bucket and return // the ranges as a hash rather than an array. 
- Keyed *bool `json:"keyed,omitempty"` - Meta Metadata `json:"meta,omitempty"` + Keyed *bool `json:"keyed,omitempty"` // MinDocCount Only returns buckets that have `min_doc_count` number of documents. // By default, all buckets between the first bucket that matches documents and // the last one are returned. @@ -65,7 +64,6 @@ type DateHistogramAggregation struct { // Missing The value to apply to documents that do not have a value. // By default, documents without a value are ignored. Missing DateTime `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` // Offset Changes the start value of each bucket by the specified positive (`+`) or // negative offset (`-`) duration. Offset Duration `json:"offset,omitempty"` @@ -136,7 +134,7 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { } case "keyed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -149,14 +147,9 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { s.Keyed = &v } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -175,18 +168,6 @@ func (s *DateHistogramAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Missing", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "offset": if err := dec.Decode(&s.Offset); err != nil { return fmt.Errorf("%s | %w", "Offset", err) diff --git a/typedapi/types/datehistogrambucket.go b/typedapi/types/datehistogrambucket.go index 5399608794..1be0415938 100644 --- a/typedapi/types/datehistogrambucket.go +++ b/typedapi/types/datehistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // DateHistogramBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L351-L354 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L353-L356 type DateHistogramBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -56,7 +56,7 @@ func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -532,7 +532,7 @@ func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -582,7 +582,7 @@ func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -592,7 +592,7 @@ func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -609,7 +609,7 @@ func (s *DateHistogramBucket) UnmarshalJSON(data []byte) error { func (s DateHistogramBucket) MarshalJSON() ([]byte, error) { type opt DateHistogramBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/datehistogramgrouping.go b/typedapi/types/datehistogramgrouping.go index ed716ba4f9..b9c69e1396 100644 --- a/typedapi/types/datehistogramgrouping.go +++ b/typedapi/types/datehistogramgrouping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DateHistogramGrouping type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/_types/Groupings.ts#L42-L73 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/_types/Groupings.ts#L42-L73 type DateHistogramGrouping struct { // CalendarInterval The interval of time buckets to be generated when rolling up. CalendarInterval Duration `json:"calendar_interval,omitempty"` diff --git a/typedapi/types/dateindexnameprocessor.go b/typedapi/types/dateindexnameprocessor.go index 1de671cc48..a0cbbd93ba 100644 --- a/typedapi/types/dateindexnameprocessor.go +++ b/typedapi/types/dateindexnameprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DateIndexNameProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L502-L540 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L502-L540 type DateIndexNameProcessor struct { // DateFormats An array of the expected date formats for parsing dates / timestamps in the // document being preprocessed. @@ -135,7 +135,7 @@ func (s *DateIndexNameProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datenanosproperty.go b/typedapi/types/datenanosproperty.go index 8ee187eb9f..3d09010b89 100644 --- a/typedapi/types/datenanosproperty.go +++ b/typedapi/types/datenanosproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DateNanosProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L73-L81 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L79-L87 type DateNanosProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -70,7 +70,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -102,7 +102,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -127,7 +127,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -148,7 +148,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -238,12 +238,6 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -262,6 +256,18 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + 
s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -418,6 +424,12 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -441,7 +453,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -456,7 +468,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -470,7 +482,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -498,7 +510,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { case "precision_step": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +531,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -540,7 +552,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -630,12 +642,6 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -654,6 +660,18 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -810,6 +828,12 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -832,7 +856,7 @@ func (s *DateNanosProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dateprocessor.go b/typedapi/types/dateprocessor.go index b639d55b6c..e073c96e73 100644 --- a/typedapi/types/dateprocessor.go +++ 
b/typedapi/types/dateprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DateProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L542-L569 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L542-L569 type DateProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -112,7 +112,7 @@ func (s *DateProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dateproperty.go b/typedapi/types/dateproperty.go index a52011d6cc..a34e07c785 100644 --- a/typedapi/types/dateproperty.go +++ b/typedapi/types/dateproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DateProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L61-L71 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L67-L77 type DateProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -72,7 +72,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -134,7 +134,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -155,7 +155,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -245,12 +245,6 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -269,6 +263,18 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := 
localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -425,6 +431,12 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -448,7 +460,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -463,7 +475,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -477,7 +489,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -517,7 +529,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { case "precision_step": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -538,7 +550,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -559,7 +571,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -649,12 +661,6 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -673,6 +679,18 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -829,6 +847,12 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -851,7 +875,7 @@ func (s *DateProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/daterangeaggregate.go b/typedapi/types/daterangeaggregate.go index 8d305e8a15..a5582ce4fe 100644 --- 
a/typedapi/types/daterangeaggregate.go +++ b/typedapi/types/daterangeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DateRangeAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L543-L548 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L547-L552 type DateRangeAggregate struct { Buckets BucketsRangeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/daterangeaggregation.go b/typedapi/types/daterangeaggregation.go index 952995c72a..ea0c179eb6 100644 --- a/typedapi/types/daterangeaggregation.go +++ b/typedapi/types/daterangeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DateRangeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L268-L294 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L270-L296 type DateRangeAggregation struct { // Field The date field whose values are use to build ranges. Field *string `json:"field,omitempty"` @@ -39,12 +39,10 @@ type DateRangeAggregation struct { Format *string `json:"format,omitempty"` // Keyed Set to `true` to associate a unique string key with each bucket and returns // the ranges as a hash rather than an array. - Keyed *bool `json:"keyed,omitempty"` - Meta Metadata `json:"meta,omitempty"` + Keyed *bool `json:"keyed,omitempty"` // Missing The value to apply to documents that do not have a value. // By default, documents without a value are ignored. Missing Missing `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` // Ranges Array of date ranges. Ranges []DateRangeExpression `json:"ranges,omitempty"` // TimeZone Time zone used to convert dates from another time zone to UTC. 
@@ -84,7 +82,7 @@ func (s *DateRangeAggregation) UnmarshalJSON(data []byte) error { s.Format = &o case "keyed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -97,28 +95,11 @@ func (s *DateRangeAggregation) UnmarshalJSON(data []byte) error { s.Keyed = &v } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "missing": if err := dec.Decode(&s.Missing); err != nil { return fmt.Errorf("%s | %w", "Missing", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "ranges": if err := dec.Decode(&s.Ranges); err != nil { return fmt.Errorf("%s | %w", "Ranges", err) diff --git a/typedapi/types/daterangeexpression.go b/typedapi/types/daterangeexpression.go index 90b705320a..f5f2ebc781 100644 --- a/typedapi/types/daterangeexpression.go +++ b/typedapi/types/daterangeexpression.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DateRangeExpression type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L305-L318 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L307-L320 type DateRangeExpression struct { // From Start of the range (inclusive). From FieldDateMath `json:"from,omitempty"` diff --git a/typedapi/types/daterangeproperty.go b/typedapi/types/daterangeproperty.go index c65f09e2ba..d4e79f128c 100644 --- a/typedapi/types/daterangeproperty.go +++ b/typedapi/types/daterangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DateRangeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/range.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/range.ts#L29-L32 type DateRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -68,7 +68,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,7 +84,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -139,7 +139,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -160,7 +160,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -250,12 +250,6 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -274,6 +268,18 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -430,6 +436,12 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -453,7 +465,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -468,7 +480,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -496,7 +508,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -517,7 +529,7 @@ func (s *DateRangeProperty) 
UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -607,12 +619,6 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -631,6 +637,18 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -787,6 +805,12 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -809,7 +833,7 @@ func (s *DateRangeProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/daterangequery.go b/typedapi/types/daterangequery.go index 9df61c321b..e670123ced 100644 --- a/typedapi/types/daterangequery.go +++ b/typedapi/types/daterangequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DateRangeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L116-L143 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L146-L155 type DateRangeQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -43,7 +43,7 @@ type DateRangeQuery struct { Boost *float32 `json:"boost,omitempty"` // Format Date format used to convert `date` values in the query. Format *string `json:"format,omitempty"` - From string `json:"from,omitempty"` + From *string `json:"from,omitempty"` // Gt Greater than. Gt *string `json:"gt,omitempty"` // Gte Greater than or equal to. @@ -58,7 +58,7 @@ type DateRangeQuery struct { // TimeZone Coordinated Universal Time (UTC) offset or IANA time zone used to convert // `date` values in the query to UTC. 
TimeZone *string `json:"time_zone,omitempty"` - To string `json:"to,omitempty"` + To *string `json:"to,omitempty"` } func (s *DateRangeQuery) UnmarshalJSON(data []byte) error { @@ -77,7 +77,7 @@ func (s *DateRangeQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/datetime.go b/typedapi/types/datetime.go index 35d7a16d69..66899256c9 100644 --- a/typedapi/types/datetime.go +++ b/typedapi/types/datetime.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // int64 // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Time.ts#L22-L27 -type DateTime interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Time.ts#L22-L27 +type DateTime any diff --git a/typedapi/types/decayfunction.go b/typedapi/types/decayfunction.go index b02bb45c7c..f2308ff158 100644 --- a/typedapi/types/decayfunction.go +++ b/typedapi/types/decayfunction.go @@ -16,15 +16,16 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DecayFunction holds the union for the following types: // +// UntypedDecayFunction // DateDecayFunction // NumericDecayFunction // GeoDecayFunction // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L194-L199 -type DecayFunction interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L199-L208 +type DecayFunction any diff --git a/typedapi/types/decayfunctionbasedatemathduration.go b/typedapi/types/decayfunctionbasedatemathduration.go new file mode 100644 index 0000000000..76585e1ce9 --- /dev/null +++ b/typedapi/types/decayfunctionbasedatemathduration.go @@ -0,0 +1,76 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. 
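With From and To on DateRangeQuery now generated as *string, call sites that assigned string literals need to take an address (or use an optional-value helper, if one is available in the module version in use). A short sketch against typedapi/types, with illustrative date-math values; it assumes an import of github.com/elastic/go-elasticsearch/v8/typedapi/types:

	from := "now-1d/d"
	to := "now/d"
	format := "strict_date_optional_time"
	q := types.DateRangeQuery{
		From:   &from,
		To:     &to,
		Format: &format,
	}
	// q is typically nested under a range query keyed by the target field name.
	_ = q

Unset bounds now stay nil and are dropped from the serialized query, matching the other optional pointer fields on this struct.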
+// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "encoding/json" + "fmt" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/multivaluemode" +) + +// DecayFunctionBaseDateMathDuration type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L175-L186 +type DecayFunctionBaseDateMathDuration struct { + DecayFunctionBaseDateMathDuration map[string]DecayPlacementDateMathDuration `json:"-"` + // MultiValueMode Determines how the distance is calculated when a field used for computing the + // decay contains multiple values. + MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` +} + +// MarhsalJSON overrides marshalling for types with additional properties +func (s DecayFunctionBaseDateMathDuration) MarshalJSON() ([]byte, error) { + type opt DecayFunctionBaseDateMathDuration + // We transform the struct to a map without the embedded additional properties map + tmp := make(map[string]any, 0) + + data, err := json.Marshal(opt(s)) + if err != nil { + return nil, err + } + err = json.Unmarshal(data, &tmp) + if err != nil { + return nil, err + } + + // We inline the additional fields from the underlying map + for key, value := range s.DecayFunctionBaseDateMathDuration { + tmp[fmt.Sprintf("%s", key)] = value + } + delete(tmp, "DecayFunctionBaseDateMathDuration") + + data, err = json.Marshal(tmp) + if err != nil { + return nil, err + } + + return data, nil +} + +// NewDecayFunctionBaseDateMathDuration returns a DecayFunctionBaseDateMathDuration. +func NewDecayFunctionBaseDateMathDuration() *DecayFunctionBaseDateMathDuration { + r := &DecayFunctionBaseDateMathDuration{ + DecayFunctionBaseDateMathDuration: make(map[string]DecayPlacementDateMathDuration, 0), + } + + return r +} diff --git a/typedapi/types/decayfunctionbasedoubledouble.go b/typedapi/types/decayfunctionbasedoubledouble.go new file mode 100644 index 0000000000..58e61d97de --- /dev/null +++ b/typedapi/types/decayfunctionbasedoubledouble.go @@ -0,0 +1,76 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "encoding/json" + "fmt" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/multivaluemode" +) + +// DecayFunctionBasedoubledouble type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L175-L186 +type DecayFunctionBasedoubledouble struct { + DecayFunctionBasedoubledouble map[string]DecayPlacementdoubledouble `json:"-"` + // MultiValueMode Determines how the distance is calculated when a field used for computing the + // decay contains multiple values. + MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` +} + +// MarhsalJSON overrides marshalling for types with additional properties +func (s DecayFunctionBasedoubledouble) MarshalJSON() ([]byte, error) { + type opt DecayFunctionBasedoubledouble + // We transform the struct to a map without the embedded additional properties map + tmp := make(map[string]any, 0) + + data, err := json.Marshal(opt(s)) + if err != nil { + return nil, err + } + err = json.Unmarshal(data, &tmp) + if err != nil { + return nil, err + } + + // We inline the additional fields from the underlying map + for key, value := range s.DecayFunctionBasedoubledouble { + tmp[fmt.Sprintf("%s", key)] = value + } + delete(tmp, "DecayFunctionBasedoubledouble") + + data, err = json.Marshal(tmp) + if err != nil { + return nil, err + } + + return data, nil +} + +// NewDecayFunctionBasedoubledouble returns a DecayFunctionBasedoubledouble. +func NewDecayFunctionBasedoubledouble() *DecayFunctionBasedoubledouble { + r := &DecayFunctionBasedoubledouble{ + DecayFunctionBasedoubledouble: make(map[string]DecayPlacementdoubledouble, 0), + } + + return r +} diff --git a/typedapi/types/decayfunctionbasegeolocationdistance.go b/typedapi/types/decayfunctionbasegeolocationdistance.go new file mode 100644 index 0000000000..b62efc4ee6 --- /dev/null +++ b/typedapi/types/decayfunctionbasegeolocationdistance.go @@ -0,0 +1,76 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "encoding/json" + "fmt" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/multivaluemode" +) + +// DecayFunctionBaseGeoLocationDistance type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L175-L186 +type DecayFunctionBaseGeoLocationDistance struct { + DecayFunctionBaseGeoLocationDistance map[string]DecayPlacementGeoLocationDistance `json:"-"` + // MultiValueMode Determines how the distance is calculated when a field used for computing the + // decay contains multiple values. 
+ MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` +} + +// MarhsalJSON overrides marshalling for types with additional properties +func (s DecayFunctionBaseGeoLocationDistance) MarshalJSON() ([]byte, error) { + type opt DecayFunctionBaseGeoLocationDistance + // We transform the struct to a map without the embedded additional properties map + tmp := make(map[string]any, 0) + + data, err := json.Marshal(opt(s)) + if err != nil { + return nil, err + } + err = json.Unmarshal(data, &tmp) + if err != nil { + return nil, err + } + + // We inline the additional fields from the underlying map + for key, value := range s.DecayFunctionBaseGeoLocationDistance { + tmp[fmt.Sprintf("%s", key)] = value + } + delete(tmp, "DecayFunctionBaseGeoLocationDistance") + + data, err = json.Marshal(tmp) + if err != nil { + return nil, err + } + + return data, nil +} + +// NewDecayFunctionBaseGeoLocationDistance returns a DecayFunctionBaseGeoLocationDistance. +func NewDecayFunctionBaseGeoLocationDistance() *DecayFunctionBaseGeoLocationDistance { + r := &DecayFunctionBaseGeoLocationDistance{ + DecayFunctionBaseGeoLocationDistance: make(map[string]DecayPlacementGeoLocationDistance, 0), + } + + return r +} diff --git a/typedapi/types/decayplacement.go b/typedapi/types/decayplacement.go new file mode 100644 index 0000000000..3957ae2569 --- /dev/null +++ b/typedapi/types/decayplacement.go @@ -0,0 +1,105 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// DecayPlacement type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L154-L173 +type DecayPlacement struct { + // Decay Defines how documents are scored at the distance given at scale. + Decay *Float64 `json:"decay,omitempty"` + // Offset If defined, the decay function will only compute the decay function for + // documents with a distance greater than the defined `offset`. + Offset json.RawMessage `json:"offset,omitempty"` + // Origin The point of origin used for calculating distance. Must be given as a number + // for numeric field, date for date fields and geo point for geo fields. + Origin json.RawMessage `json:"origin,omitempty"` + // Scale Defines the distance from origin + offset at which the computed score will + // equal `decay` parameter. 
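These new DecayFunctionBase* types follow the additional-properties pattern used elsewhere in typedapi: the target field name is a dynamic JSON key, so the embedded map is inlined by the custom MarshalJSON above rather than serialized under its Go field name. A rough usage sketch (the field name and decay value are illustrative; it assumes encoding/json plus the typedapi/types import):

	fn := types.NewDecayFunctionBaseDateMathDuration()
	decay := types.Float64(0.5)
	fn.DecayFunctionBaseDateMathDuration["timestamp"] = types.DecayPlacementDateMathDuration{
		Decay: &decay,
	}
	data, err := json.Marshal(fn)
	if err != nil {
		// handle the error
	}
	// data should come out roughly as: {"timestamp":{"decay":0.5}}
	_ = data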
+ Scale json.RawMessage `json:"scale,omitempty"` +} + +func (s *DecayPlacement) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "decay": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return fmt.Errorf("%s | %w", "Decay", err) + } + f := Float64(value) + s.Decay = &f + case float64: + f := Float64(v) + s.Decay = &f + } + + case "offset": + if err := dec.Decode(&s.Offset); err != nil { + return fmt.Errorf("%s | %w", "Offset", err) + } + + case "origin": + if err := dec.Decode(&s.Origin); err != nil { + return fmt.Errorf("%s | %w", "Origin", err) + } + + case "scale": + if err := dec.Decode(&s.Scale); err != nil { + return fmt.Errorf("%s | %w", "Scale", err) + } + + } + } + return nil +} + +// NewDecayPlacement returns a DecayPlacement. +func NewDecayPlacement() *DecayPlacement { + r := &DecayPlacement{} + + return r +} diff --git a/typedapi/types/decayplacementdatemathduration.go b/typedapi/types/decayplacementdatemathduration.go index 4494617cfe..d4c4f40574 100644 --- a/typedapi/types/decayplacementdatemathduration.go +++ b/typedapi/types/decayplacementdatemathduration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DecayPlacementDateMathDuration type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L153-L172 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L154-L173 type DecayPlacementDateMathDuration struct { // Decay Defines how documents are scored at the distance given at scale. Decay *Float64 `json:"decay,omitempty"` @@ -62,7 +62,7 @@ func (s *DecayPlacementDateMathDuration) UnmarshalJSON(data []byte) error { switch t { case "decay": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/decayplacementdoubledouble.go b/typedapi/types/decayplacementdoubledouble.go index 8a3b9ed656..8f450bfd17 100644 --- a/typedapi/types/decayplacementdoubledouble.go +++ b/typedapi/types/decayplacementdoubledouble.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DecayPlacementdoubledouble type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L153-L172 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L154-L173 type DecayPlacementdoubledouble struct { // Decay Defines how documents are scored at the distance given at scale. 
Decay *Float64 `json:"decay,omitempty"` @@ -62,7 +62,7 @@ func (s *DecayPlacementdoubledouble) UnmarshalJSON(data []byte) error { switch t { case "decay": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -78,7 +78,7 @@ func (s *DecayPlacementdoubledouble) UnmarshalJSON(data []byte) error { } case "offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -94,7 +94,7 @@ func (s *DecayPlacementdoubledouble) UnmarshalJSON(data []byte) error { } case "origin": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +110,7 @@ func (s *DecayPlacementdoubledouble) UnmarshalJSON(data []byte) error { } case "scale": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/decayplacementgeolocationdistance.go b/typedapi/types/decayplacementgeolocationdistance.go index dac8bdb411..d3d74dcb69 100644 --- a/typedapi/types/decayplacementgeolocationdistance.go +++ b/typedapi/types/decayplacementgeolocationdistance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DecayPlacementGeoLocationDistance type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L153-L172 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L154-L173 type DecayPlacementGeoLocationDistance struct { // Decay Defines how documents are scored at the distance given at scale. Decay *Float64 `json:"decay,omitempty"` @@ -62,7 +62,7 @@ func (s *DecayPlacementGeoLocationDistance) UnmarshalJSON(data []byte) error { switch t { case "decay": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/defaults.go b/typedapi/types/defaults.go index 062d3c0271..f782881b05 100644 --- a/typedapi/types/defaults.go +++ b/typedapi/types/defaults.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Defaults type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/info/types.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/info/types.ts#L24-L27 type Defaults struct { AnomalyDetectors AnomalyDetectors `json:"anomaly_detectors"` Datafeeds Datafeeds `json:"datafeeds"` diff --git a/typedapi/types/definition.go b/typedapi/types/definition.go index ba1d700e4c..9d1fa81a1d 100644 --- a/typedapi/types/definition.go +++ b/typedapi/types/definition.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Definition type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/types.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/types.ts#L24-L29 type Definition struct { // Preprocessors Collection of preprocessors Preprocessors []Preprocessor `json:"preprocessors,omitempty"` diff --git a/typedapi/types/delayeddatacheckconfig.go b/typedapi/types/delayeddatacheckconfig.go index 64995b78d7..1ab8e47c3a 100644 --- a/typedapi/types/delayeddatacheckconfig.go +++ b/typedapi/types/delayeddatacheckconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DelayedDataCheckConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Datafeed.ts#L119-L130 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Datafeed.ts#L118-L129 type DelayedDataCheckConfig struct { // CheckWindow The window of time that is searched for late data. This window of time ends // with the latest finalized bucket. @@ -65,7 +65,7 @@ func (s *DelayedDataCheckConfig) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/deleteinferenceendpointresult.go b/typedapi/types/deleteinferenceendpointresult.go new file mode 100644 index 0000000000..75f5432891 --- /dev/null +++ b/typedapi/types/deleteinferenceendpointresult.go @@ -0,0 +1,86 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// DeleteInferenceEndpointResult type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/_types/Results.ts#L91-L96 +type DeleteInferenceEndpointResult struct { + // Acknowledged For a successful response, this value is always true. On failure, an + // exception is returned instead. + Acknowledged bool `json:"acknowledged"` + Pipelines []string `json:"pipelines"` +} + +func (s *DeleteInferenceEndpointResult) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "acknowledged": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Acknowledged", err) + } + s.Acknowledged = value + case bool: + s.Acknowledged = v + } + + case "pipelines": + if err := dec.Decode(&s.Pipelines); err != nil { + return fmt.Errorf("%s | %w", "Pipelines", err) + } + + } + } + return nil +} + +// NewDeleteInferenceEndpointResult returns a DeleteInferenceEndpointResult. +func NewDeleteInferenceEndpointResult() *DeleteInferenceEndpointResult { + r := &DeleteInferenceEndpointResult{} + + return r +} diff --git a/typedapi/types/deleteoperation.go b/typedapi/types/deleteoperation.go index 0bbafc0d1c..41122533ca 100644 --- a/typedapi/types/deleteoperation.go +++ b/typedapi/types/deleteoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DeleteOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/bulk/types.ts#L134-L134 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/bulk/types.ts#L134-L134 type DeleteOperation struct { // Id_ The document ID. Id_ *string `json:"_id,omitempty"` @@ -68,7 +68,7 @@ func (s *DeleteOperation) UnmarshalJSON(data []byte) error { } case "if_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/delimitedpayloadtokenfilter.go b/typedapi/types/delimitedpayloadtokenfilter.go index e05540a947..2b823e9284 100644 --- a/typedapi/types/delimitedpayloadtokenfilter.go +++ b/typedapi/types/delimitedpayloadtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DelimitedPayloadTokenFilter type. 
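The hand-rolled decoder on DeleteInferenceEndpointResult keeps the lenient behavior used throughout these generated types: acknowledged is accepted either as a JSON boolean or as a quoted "true"/"false". A small sketch of decoding such a payload (the payload itself is made up; assumes encoding/json and the typedapi/types import):

	payload := []byte(`{"acknowledged":"true","pipelines":["my-ingest-pipeline"]}`)
	var res types.DeleteInferenceEndpointResult
	if err := json.Unmarshal(payload, &res); err != nil {
		// handle the error
	}
	// res.Acknowledged is true; res.Pipelines carries whatever pipeline IDs the response reported.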
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L68-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L68-L72 type DelimitedPayloadTokenFilter struct { Delimiter *string `json:"delimiter,omitempty"` Encoding *delimitedpayloadencoding.DelimitedPayloadEncoding `json:"encoding,omitempty"` diff --git a/typedapi/types/densevectorindexoptions.go b/typedapi/types/densevectorindexoptions.go index bdcac47a19..38499d4c6d 100644 --- a/typedapi/types/densevectorindexoptions.go +++ b/typedapi/types/densevectorindexoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DenseVectorIndexOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/DenseVectorIndexOptions.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/DenseVectorIndexOptions.ts#L22-L26 type DenseVectorIndexOptions struct { EfConstruction int `json:"ef_construction"` M int `json:"m"` @@ -55,7 +55,7 @@ func (s *DenseVectorIndexOptions) UnmarshalJSON(data []byte) error { case "ef_construction": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *DenseVectorIndexOptions) UnmarshalJSON(data []byte) error { case "m": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/densevectorproperty.go b/typedapi/types/densevectorproperty.go index ec047a10d5..4a741ae7ac 100644 --- a/typedapi/types/densevectorproperty.go +++ b/typedapi/types/densevectorproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,10 +33,11 @@ import ( // DenseVectorProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/complex.ts#L52-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/complex.ts#L52-L59 type DenseVectorProperty struct { Dims *int `json:"dims,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` + ElementType *string `json:"element_type,omitempty"` Fields map[string]Property `json:"fields,omitempty"` IgnoreAbove *int `json:"ignore_above,omitempty"` Index *bool `json:"index,omitempty"` @@ -65,7 +66,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { case "dims": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,6 +85,18 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Dynamic", err) } + case "element_type": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "ElementType", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.ElementType = &o + case "fields": if s.Fields == nil { s.Fields = make(map[string]Property, 0) @@ -91,7 +104,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -112,7 +125,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -202,12 +215,6 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -226,6 +233,18 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -382,6 +401,12 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -393,7 +418,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -408,7 +433,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -441,7 +466,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { 
refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -462,7 +487,7 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -552,12 +577,6 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -576,6 +595,18 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -732,6 +763,12 @@ func (s *DenseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -769,6 +806,7 @@ func (s DenseVectorProperty) MarshalJSON() ([]byte, error) { tmp := innerDenseVectorProperty{ Dims: s.Dims, Dynamic: s.Dynamic, + ElementType: s.ElementType, Fields: s.Fields, IgnoreAbove: s.IgnoreAbove, Index: s.Index, diff --git a/typedapi/types/deprecation.go b/typedapi/types/deprecation.go index 4accdf419a..5612111565 100644 --- a/typedapi/types/deprecation.go +++ b/typedapi/types/deprecation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // Deprecation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/migration/deprecations/types.ts#L29-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/migration/deprecations/types.ts#L29-L35 type Deprecation struct { Details string `json:"details"` // Level The level property describes the significance of the issue. diff --git a/typedapi/types/deprecationindexing.go b/typedapi/types/deprecationindexing.go index 1abef12479..1e907adecc 100644 --- a/typedapi/types/deprecationindexing.go +++ b/typedapi/types/deprecationindexing.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
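The new ElementType on DenseVectorProperty is a plain optional string mirroring the element_type mapping parameter ("float" and "byte" are among the values Elasticsearch accepts for dense_vector). A brief sketch of populating it, assuming the generated NewDenseVectorProperty constructor and the typedapi/types import (the dimension count is illustrative):

	prop := types.NewDenseVectorProperty()
	dims := 384
	elementType := "byte"
	prop.Dims = &dims
	prop.ElementType = &elementType
	// prop can then be used as the mapping definition for a vector field.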
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DeprecationIndexing type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L144-L146 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L144-L146 type DeprecationIndexing struct { Enabled string `json:"enabled"` } diff --git a/typedapi/types/derivativeaggregate.go b/typedapi/types/derivativeaggregate.go index 83984d1fd7..b5584b02cc 100644 --- a/typedapi/types/derivativeaggregate.go +++ b/typedapi/types/derivativeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DerivativeAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L227-L231 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L227-L231 type DerivativeAggregate struct { Meta Metadata `json:"meta,omitempty"` NormalizedValue *Float64 `json:"normalized_value,omitempty"` @@ -39,8 +39,8 @@ type DerivativeAggregate struct { // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. - Value Float64 `json:"value,omitempty"` - ValueAsString *string `json:"value_as_string,omitempty"` + Value *Float64 `json:"value,omitempty"` + ValueAsString *string `json:"value_as_string,omitempty"` } func (s *DerivativeAggregate) UnmarshalJSON(data []byte) error { @@ -64,7 +64,7 @@ func (s *DerivativeAggregate) UnmarshalJSON(data []byte) error { } case "normalized_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/derivativeaggregation.go b/typedapi/types/derivativeaggregation.go index 21fe5684c8..5f4f1cdffa 100644 --- a/typedapi/types/derivativeaggregation.go +++ b/typedapi/types/derivativeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DerivativeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L196-L196 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L196-L196 type DerivativeAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
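Value on DerivativeAggregate is now a pointer, so response handling should distinguish a missing metric (nil) from an actual zero; the DerivativeAggregation hunk just below also drops Meta and Name, matching the same cleanup applied to DateRangeAggregation earlier in this change. A small helper sketch (the function name is invented for illustration; assumes the typedapi/types import):

	func derivativeValue(agg *types.DerivativeAggregate) (float64, bool) {
		if agg == nil || agg.Value == nil {
			return 0, false
		}
		return float64(*agg.Value), true
	}

Callers can then branch on the returned boolean instead of testing for a zero value.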
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type DerivativeAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` } func (s *DerivativeAggregation) UnmarshalJSON(data []byte) error { @@ -84,23 +82,6 @@ func (s *DerivativeAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/detectionrule.go b/typedapi/types/detectionrule.go index d7f173a08a..4329c00cb7 100644 --- a/typedapi/types/detectionrule.go +++ b/typedapi/types/detectionrule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // DetectionRule type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Rule.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Rule.ts#L25-L39 type DetectionRule struct { // Actions The set of actions to be triggered when the rule applies. If more than one // action is specified the effects of all actions are combined. diff --git a/typedapi/types/detector.go b/typedapi/types/detector.go index 054b88bc2e..8ee0957db1 100644 --- a/typedapi/types/detector.go +++ b/typedapi/types/detector.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // Detector type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Detector.ts#L25-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Detector.ts#L25-L67 type Detector struct { // ByFieldName The field used to split the data. In particular, this property is used for // analyzing the splits with respect to their own history. 
It is used for @@ -112,7 +112,7 @@ func (s *Detector) UnmarshalJSON(data []byte) error { case "detector_index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -159,7 +159,7 @@ func (s *Detector) UnmarshalJSON(data []byte) error { } case "use_null": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/detectorread.go b/typedapi/types/detectorread.go index e9dea64bca..48812f4db7 100644 --- a/typedapi/types/detectorread.go +++ b/typedapi/types/detectorread.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DetectorRead type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Detector.ts#L69-L125 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Detector.ts#L69-L125 type DetectorRead struct { // ByFieldName The field used to split the data. // In particular, this property is used for analyzing the splits with respect to @@ -119,7 +119,7 @@ func (s *DetectorRead) UnmarshalJSON(data []byte) error { case "detector_index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -166,7 +166,7 @@ func (s *DetectorRead) UnmarshalJSON(data []byte) error { } case "use_null": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/diagnosis.go b/typedapi/types/diagnosis.go index e1c779a24c..9e4f21e5b2 100644 --- a/typedapi/types/diagnosis.go +++ b/typedapi/types/diagnosis.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Diagnosis type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L49-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L49-L55 type Diagnosis struct { Action string `json:"action"` AffectedResources DiagnosisAffectedResources `json:"affected_resources"` diff --git a/typedapi/types/diagnosisaffectedresources.go b/typedapi/types/diagnosisaffectedresources.go index 2bfefd2288..550e5343ac 100644 --- a/typedapi/types/diagnosisaffectedresources.go +++ b/typedapi/types/diagnosisaffectedresources.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DiagnosisAffectedResources type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L57-L63 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L57-L63 type DiagnosisAffectedResources struct { FeatureStates []string `json:"feature_states,omitempty"` Indices []string `json:"indices,omitempty"` diff --git a/typedapi/types/dictionarydecompoundertokenfilter.go b/typedapi/types/dictionarydecompoundertokenfilter.go index 5f82a48b54..10723d76bf 100644 --- a/typedapi/types/dictionarydecompoundertokenfilter.go +++ b/typedapi/types/dictionarydecompoundertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DictionaryDecompounderTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L54-L56 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L54-L56 type DictionaryDecompounderTokenFilter struct { HyphenationPatternsPath *string `json:"hyphenation_patterns_path,omitempty"` MaxSubwordSize *int `json:"max_subword_size,omitempty"` @@ -73,7 +73,7 @@ func (s *DictionaryDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case "max_subword_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *DictionaryDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case "min_subword_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -105,7 +105,7 @@ func (s *DictionaryDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case "min_word_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -120,7 +120,7 @@ func (s *DictionaryDecompounderTokenFilter) UnmarshalJSON(data []byte) error { } case "only_longest_match": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/directgenerator.go b/typedapi/types/directgenerator.go index 3884d818e6..7b7622c05c 100644 --- a/typedapi/types/directgenerator.go +++ b/typedapi/types/directgenerator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DirectGenerator type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L265-L328 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L268-L331 type DirectGenerator struct { // Field The field to fetch the candidate suggestions from. // Needs to be set globally or per suggestion. 
@@ -102,7 +102,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case "max_edits": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +117,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { } case "max_inspections": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -133,7 +133,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { } case "max_term_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -149,7 +149,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { } case "min_doc_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -166,7 +166,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case "min_word_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -206,7 +206,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case "prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -222,7 +222,7 @@ func (s *DirectGenerator) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/discovery.go b/typedapi/types/discovery.go index 4b43273440..9ac3183799 100644 --- a/typedapi/types/discovery.go +++ b/typedapi/types/discovery.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Discovery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L201-L219 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L201-L219 type Discovery struct { ClusterApplierStats *ClusterAppliedStats `json:"cluster_applier_stats,omitempty"` // ClusterStateQueue Contains statistics for the cluster state queue of the node. diff --git a/typedapi/types/discoverynode.go b/typedapi/types/discoverynode.go index b0aee2e2e3..b30a4687cf 100644 --- a/typedapi/types/discoverynode.go +++ b/typedapi/types/discoverynode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DiscoveryNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DiscoveryNode.ts#L24-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DiscoveryNode.ts#L24-L30 type DiscoveryNode struct { Attributes map[string]string `json:"attributes"` EphemeralId string `json:"ephemeral_id"` diff --git a/typedapi/types/diskindicator.go b/typedapi/types/diskindicator.go index 7de99a4c4d..9470fef9b4 100644 --- a/typedapi/types/diskindicator.go +++ b/typedapi/types/diskindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DiskIndicator type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L121-L125 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L121-L125 type DiskIndicator struct { Details *DiskIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` diff --git a/typedapi/types/diskindicatordetails.go b/typedapi/types/diskindicatordetails.go index 968530f1a2..119d7d5f65 100644 --- a/typedapi/types/diskindicatordetails.go +++ b/typedapi/types/diskindicatordetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DiskIndicatorDetails type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L126-L132 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L126-L132 type DiskIndicatorDetails struct { IndicesWithReadonlyBlock int64 `json:"indices_with_readonly_block"` NodesOverFloodStageWatermark int64 `json:"nodes_over_flood_stage_watermark"` @@ -56,7 +56,7 @@ func (s *DiskIndicatorDetails) UnmarshalJSON(data []byte) error { switch t { case "indices_with_readonly_block": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *DiskIndicatorDetails) UnmarshalJSON(data []byte) error { } case "nodes_over_flood_stage_watermark": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -86,7 +86,7 @@ func (s *DiskIndicatorDetails) UnmarshalJSON(data []byte) error { } case "nodes_over_high_watermark": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -101,7 +101,7 @@ func (s *DiskIndicatorDetails) UnmarshalJSON(data []byte) error { } case "nodes_with_enough_disk_space": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -116,7 +116,7 @@ func (s *DiskIndicatorDetails) UnmarshalJSON(data []byte) error { } case "nodes_with_unknown_disk_status": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/diskusage.go b/typedapi/types/diskusage.go index 0bcb9c643c..3f0d7bea05 100644 --- a/typedapi/types/diskusage.go +++ b/typedapi/types/diskusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DiskUsage type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/types.ts#L62-L69 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/types.ts#L62-L69 type DiskUsage struct { FreeBytes int64 `json:"free_bytes"` FreeDiskPercent Float64 `json:"free_disk_percent"` @@ -57,7 +57,7 @@ func (s *DiskUsage) UnmarshalJSON(data []byte) error { switch t { case "free_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *DiskUsage) UnmarshalJSON(data []byte) error { } case "free_disk_percent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *DiskUsage) UnmarshalJSON(data []byte) error { s.Path = o case "total_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *DiskUsage) UnmarshalJSON(data []byte) error { } case "used_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -130,7 +130,7 @@ func (s *DiskUsage) UnmarshalJSON(data []byte) error { } case "used_disk_percent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dismaxquery.go b/typedapi/types/dismaxquery.go index 073378c1bf..c5415a4b1c 100644 --- a/typedapi/types/dismaxquery.go +++ b/typedapi/types/dismaxquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DisMaxQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L78-L90 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L79-L91 type DisMaxQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -66,7 +66,7 @@ func (s *DisMaxQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,7 +99,7 @@ func (s *DisMaxQuery) UnmarshalJSON(data []byte) error { s.QueryName_ = &o case "tie_breaker": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/dissectprocessor.go b/typedapi/types/dissectprocessor.go index 992640d4fd..48a0b66a88 100644 --- a/typedapi/types/dissectprocessor.go +++ b/typedapi/types/dissectprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DissectProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L571-L590 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L571-L590 type DissectProcessor struct { // AppendSeparator The character(s) that separate the appended fields. AppendSeparator *string `json:"append_separator,omitempty"` @@ -113,7 +113,7 @@ func (s *DissectProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -127,7 +127,7 @@ func (s *DissectProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/distancefeaturequery.go b/typedapi/types/distancefeaturequery.go index 01da93ce22..148fae3ca2 100644 --- a/typedapi/types/distancefeaturequery.go +++ b/typedapi/types/distancefeaturequery.go @@ -16,14 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DistanceFeatureQuery holds the union for the following types: // +// UntypedDistanceFeatureQuery // GeoDistanceFeatureQuery // DateDistanceFeatureQuery // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L72-L76 -type DistanceFeatureQuery interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L77-L85 +type DistanceFeatureQuery any diff --git a/typedapi/types/distancefeaturequerybasedatemathduration.go b/typedapi/types/distancefeaturequerybasedatemathduration.go index c4bc27f8a5..19ffc70066 100644 --- a/typedapi/types/distancefeaturequerybasedatemathduration.go +++ b/typedapi/types/distancefeaturequerybasedatemathduration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DistanceFeatureQueryBaseDateMathDuration type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L40-L60 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L40-L60 type DistanceFeatureQueryBaseDateMathDuration struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -78,7 +78,7 @@ func (s *DistanceFeatureQueryBaseDateMathDuration) UnmarshalJSON(data []byte) er switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/distancefeaturequerybasegeolocationdistance.go b/typedapi/types/distancefeaturequerybasegeolocationdistance.go index 85aeba9c85..d5d3059af4 100644 --- a/typedapi/types/distancefeaturequerybasegeolocationdistance.go +++ b/typedapi/types/distancefeaturequerybasegeolocationdistance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DistanceFeatureQueryBaseGeoLocationDistance type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L40-L60 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L40-L60 type DistanceFeatureQueryBaseGeoLocationDistance struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -78,7 +78,7 @@ func (s *DistanceFeatureQueryBaseGeoLocationDistance) UnmarshalJSON(data []byte) switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/diversifiedsampleraggregation.go b/typedapi/types/diversifiedsampleraggregation.go index f9346711cf..bbf5c09c26 100644 --- a/typedapi/types/diversifiedsampleraggregation.go +++ b/typedapi/types/diversifiedsampleraggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,17 +33,15 @@ import ( // DiversifiedSamplerAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L320-L341 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L322-L343 type DiversifiedSamplerAggregation struct { // ExecutionHint The type of value used for de-duplication. ExecutionHint *sampleraggregationexecutionhint.SamplerAggregationExecutionHint `json:"execution_hint,omitempty"` // Field The field used to provide values used for de-duplication. Field *string `json:"field,omitempty"` // MaxDocsPerValue Limits how many documents are permitted per choice of de-duplicating value. - MaxDocsPerValue *int `json:"max_docs_per_value,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` - Script Script `json:"script,omitempty"` + MaxDocsPerValue *int `json:"max_docs_per_value,omitempty"` + Script Script `json:"script,omitempty"` // ShardSize Limits how many top-scoring documents are collected in the sample processed // on each shard. 
ShardSize *int `json:"shard_size,omitempty"` @@ -76,7 +74,7 @@ func (s *DiversifiedSamplerAggregation) UnmarshalJSON(data []byte) error { case "max_docs_per_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,23 +88,6 @@ func (s *DiversifiedSamplerAggregation) UnmarshalJSON(data []byte) error { s.MaxDocsPerValue = &f } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "script": message := json.RawMessage{} if err := dec.Decode(&message); err != nil { @@ -145,7 +126,7 @@ func (s *DiversifiedSamplerAggregation) UnmarshalJSON(data []byte) error { case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/docstats.go b/typedapi/types/docstats.go index 3c80e8b00f..e5d5e62ff9 100644 --- a/typedapi/types/docstats.go +++ b/typedapi/types/docstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DocStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L97-L109 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L97-L109 type DocStats struct { // Count Total number of non-deleted documents across all primary shards assigned to // selected nodes. @@ -62,7 +62,7 @@ func (s *DocStats) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -77,7 +77,7 @@ func (s *DocStats) UnmarshalJSON(data []byte) error { } case "deleted": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/document.go b/typedapi/types/document.go index 102e0e317e..0bb941b7d7 100644 --- a/typedapi/types/document.go +++ b/typedapi/types/document.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Document type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/simulate/types.ts#L41-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/simulate/types.ts#L41-L55 type Document struct { // Id_ Unique identifier for the document. // This ID must be unique within the `_index`. 
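The `var tmp interface{}` → `var tmp any` hunks that recur throughout these generated files are purely cosmetic: `any` has been an alias for `interface{}` since Go 1.18, so the lenient string-or-number decoding done by the generated UnmarshalJSON methods is unchanged. A minimal standalone sketch of that decode pattern follows; it assumes nothing beyond the standard library, and the helper name decodeCount is illustrative only, not part of the generated code.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeCount accepts either a JSON number or a quoted number string,
// mirroring the type switch used in the generated UnmarshalJSON methods.
func decodeCount(raw []byte) (int64, error) {
	var tmp any // alias for interface{}; behaviour is identical
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return 0, err
	}
	switch v := tmp.(type) {
	case string:
		// Quoted numbers are parsed explicitly, as the generated code does.
		return strconv.ParseInt(v, 10, 64)
	case float64:
		// encoding/json decodes all JSON numbers into float64 by default.
		return int64(v), nil
	default:
		return 0, fmt.Errorf("unexpected type %T", tmp)
	}
}

func main() {
	n1, _ := decodeCount([]byte(`42`))
	n2, _ := decodeCount([]byte(`"42"`))
	fmt.Println(n1, n2) // prints: 42 42
}

The same shape appears for booleans and floats in these hunks; only the `strconv` call and the target field differ.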
diff --git a/typedapi/types/documentrating.go b/typedapi/types/documentrating.go index 42a59ee75e..78f0e22e34 100644 --- a/typedapi/types/documentrating.go +++ b/typedapi/types/documentrating.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DocumentRating type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L116-L123 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L116-L123 type DocumentRating struct { // Id_ The document ID. Id_ string `json:"_id"` @@ -69,7 +69,7 @@ func (s *DocumentRating) UnmarshalJSON(data []byte) error { case "rating": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/documentsimulation.go b/typedapi/types/documentsimulation.go index 1635d6a46a..ea0c95e14e 100644 --- a/typedapi/types/documentsimulation.go +++ b/typedapi/types/documentsimulation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DocumentSimulation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/simulate/types.ts#L57-L85 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/simulate/types.ts#L57-L87 type DocumentSimulation struct { DocumentSimulation map[string]string `json:"-"` // Id_ Unique identifier for the document. This ID must be unique within the @@ -132,7 +132,7 @@ func (s *DocumentSimulation) UnmarshalJSON(data []byte) error { func (s DocumentSimulation) MarshalJSON() ([]byte, error) { type opt DocumentSimulation // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/dotexpanderprocessor.go b/typedapi/types/dotexpanderprocessor.go index 699f00729f..262149ae95 100644 --- a/typedapi/types/dotexpanderprocessor.go +++ b/typedapi/types/dotexpanderprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DotExpanderProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L592-L603 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L592-L603 type DotExpanderProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -99,7 +99,7 @@ func (s *DotExpanderProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/doublenumberproperty.go b/typedapi/types/doublenumberproperty.go index 2c3652f875..990f4a2a5a 100644 --- a/typedapi/types/doublenumberproperty.go +++ b/typedapi/types/doublenumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // DoubleNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L144-L147 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L152-L155 type DoubleNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -79,7 +79,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -171,7 +171,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -261,12 +261,6 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -285,6 +279,18 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := 
NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -441,6 +447,12 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -452,7 +464,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -467,7 +479,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -481,7 +493,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -503,7 +515,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { } case "null_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -530,7 +542,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -551,7 +563,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -641,12 +653,6 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -665,6 +671,18 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -821,6 +839,12 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -879,7 +903,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -893,7 +917,7 @@ func (s *DoubleNumberProperty) UnmarshalJSON(data []byte) error { } case "time_series_dimension": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v 
:= tmp.(type) { case string: diff --git a/typedapi/types/doublerangeproperty.go b/typedapi/types/doublerangeproperty.go index 3164c2d262..d5afac8a81 100644 --- a/typedapi/types/doublerangeproperty.go +++ b/typedapi/types/doublerangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // DoubleRangeProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/range.ts#L34-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/range.ts#L34-L36 type DoubleRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -67,7 +67,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -159,7 +159,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -249,12 +249,6 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -273,6 +267,18 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -429,6 +435,12 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -440,7 +452,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + 
var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -455,7 +467,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -483,7 +495,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -504,7 +516,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -594,12 +606,6 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -618,6 +624,18 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -774,6 +792,12 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -796,7 +820,7 @@ func (s *DoubleRangeProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/doubletermsaggregate.go b/typedapi/types/doubletermsaggregate.go index b06c6207f2..9c50cc09f0 100644 --- a/typedapi/types/doubletermsaggregate.go +++ b/typedapi/types/doubletermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DoubleTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L411-L416 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L413-L418 type DoubleTermsAggregate struct { Buckets BucketsDoubleTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -76,7 +76,7 @@ func (s *DoubleTermsAggregate) UnmarshalJSON(data []byte) error { } case "doc_count_error_upper_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *DoubleTermsAggregate) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/doubletermsbucket.go b/typedapi/types/doubletermsbucket.go index 3cfec3483d..7992cb867f 100644 --- a/typedapi/types/doubletermsbucket.go +++ b/typedapi/types/doubletermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,13 +32,13 @@ import ( // DoubleTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L418-L421 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L420-L423 type DoubleTermsBucket struct { - Aggregations map[string]Aggregate `json:"-"` - DocCount int64 `json:"doc_count"` - DocCountError *int64 `json:"doc_count_error,omitempty"` - Key Float64 `json:"key"` - KeyAsString *string `json:"key_as_string,omitempty"` + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Key Float64 `json:"key"` + KeyAsString *string `json:"key_as_string,omitempty"` } func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { @@ -57,7 +57,7 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,23 +71,23 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { s.DocCount = f } - case "doc_count_error": - var tmp interface{} + case "doc_count_error_upper_bound": + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return fmt.Errorf("%s | %w", "DocCountError", err) + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } - s.DocCountError = &value + s.DocCountErrorUpperBound = &value case float64: f := int64(v) - s.DocCountError = &f + s.DocCountErrorUpperBound = &f } case "key": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -559,7 +559,7 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ 
-609,7 +609,7 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -619,7 +619,7 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -636,7 +636,7 @@ func (s *DoubleTermsBucket) UnmarshalJSON(data []byte) error { func (s DoubleTermsBucket) MarshalJSON() ([]byte, error) { type opt DoubleTermsBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/downsampleconfig.go b/typedapi/types/downsampleconfig.go index f5cd066ce5..3661586a33 100644 --- a/typedapi/types/downsampleconfig.go +++ b/typedapi/types/downsampleconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DownsampleConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/Downsample.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/Downsample.ts#L22-L27 type DownsampleConfig struct { // FixedInterval The interval at which to aggregate the original time series index. FixedInterval string `json:"fixed_interval"` diff --git a/typedapi/types/downsamplinground.go b/typedapi/types/downsamplinground.go index 51f9e8c5fd..c147cc9bfb 100644 --- a/typedapi/types/downsamplinground.go +++ b/typedapi/types/downsamplinground.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DownsamplingRound type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/DownsamplingRound.ts#L23-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/DownsamplingRound.ts#L23-L32 type DownsamplingRound struct { // After The duration since rollover when this downsampling round should execute After Duration `json:"after"` diff --git a/typedapi/types/dropprocessor.go b/typedapi/types/dropprocessor.go index b4acbaed85..c8b1fe62d0 100644 --- a/typedapi/types/dropprocessor.go +++ b/typedapi/types/dropprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // DropProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L605-L605 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L605-L605 type DropProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -87,7 +87,7 @@ func (s *DropProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/duration.go b/typedapi/types/duration.go index 35b82c9f87..e714a2b398 100644 --- a/typedapi/types/duration.go +++ b/typedapi/types/duration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -24,5 +24,5 @@ package types // // string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Time.ts#L52-L58 -type Duration interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Time.ts#L52-L58 +type Duration any diff --git a/typedapi/types/durationvalueunitfloatmillis.go b/typedapi/types/durationvalueunitfloatmillis.go index e82b39e748..df23139ed6 100644 --- a/typedapi/types/durationvalueunitfloatmillis.go +++ b/typedapi/types/durationvalueunitfloatmillis.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DurationValueUnitFloatMillis type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Time.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Time.ts#L67-L67 type DurationValueUnitFloatMillis Float64 diff --git a/typedapi/types/durationvalueunitmillis.go b/typedapi/types/durationvalueunitmillis.go index 8060e4a6be..6945126dcf 100644 --- a/typedapi/types/durationvalueunitmillis.go +++ b/typedapi/types/durationvalueunitmillis.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DurationValueUnitMillis type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Time.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Time.ts#L67-L67 type DurationValueUnitMillis int64 diff --git a/typedapi/types/durationvalueunitnanos.go b/typedapi/types/durationvalueunitnanos.go index 54633245e7..5e1970cb4b 100644 --- a/typedapi/types/durationvalueunitnanos.go +++ b/typedapi/types/durationvalueunitnanos.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DurationValueUnitNanos type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Time.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Time.ts#L67-L67 type DurationValueUnitNanos int64 diff --git a/typedapi/types/durationvalueunitseconds.go b/typedapi/types/durationvalueunitseconds.go index 2fd5f2ac86..62ff0d35eb 100644 --- a/typedapi/types/durationvalueunitseconds.go +++ b/typedapi/types/durationvalueunitseconds.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // DurationValueUnitSeconds type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Time.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Time.ts#L67-L67 type DurationValueUnitSeconds int64 diff --git a/typedapi/types/dutchanalyzer.go b/typedapi/types/dutchanalyzer.go index 1f691899bd..3e080e6f63 100644 --- a/typedapi/types/dutchanalyzer.go +++ b/typedapi/types/dutchanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // DutchAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L61-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L61-L64 type DutchAnalyzer struct { Stopwords []string `json:"stopwords,omitempty"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/dynamicproperty.go b/typedapi/types/dynamicproperty.go index 281e574e62..a0589555be 100644 --- a/typedapi/types/dynamicproperty.go +++ b/typedapi/types/dynamicproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -37,7 +37,7 @@ import ( // DynamicProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L286-L317 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L300-L331 type DynamicProperty struct { Analyzer *string `json:"analyzer,omitempty"` Boost *Float64 `json:"boost,omitempty"` @@ -102,7 +102,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { s.Analyzer = &o case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +118,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -148,7 +148,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -167,7 +167,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } case "eager_global_ordinals": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -181,7 +181,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -201,7 +201,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -222,7 +222,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -312,12 +312,6 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -336,6 +330,18 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -492,6 +498,12 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -515,7 +527,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp 
interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -530,7 +542,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -544,7 +556,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -563,7 +575,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } case "index_phrases": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -602,7 +614,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { } case "norms": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -627,7 +639,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "position_increment_gap": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -643,7 +655,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { case "precision_step": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -664,7 +676,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -685,7 +697,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -775,12 +787,6 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -799,6 +805,18 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -955,6 +973,12 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -1037,7 +1061,7 @@ func (s *DynamicProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -1108,7 +1132,7 @@ func (s DynamicProperty) MarshalJSON() ([]byte, error) { Type: s.Type, } - tmp.Type = "{dynamic_property}" + tmp.Type = "{dynamic_type}" return json.Marshal(tmp) } diff --git a/typedapi/types/dynamictemplate.go b/typedapi/types/dynamictemplate.go index 9556a72245..ce145c3a7f 
100644 --- a/typedapi/types/dynamictemplate.go +++ b/typedapi/types/dynamictemplate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,22 +26,23 @@ import ( "errors" "fmt" "io" - "strconv" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/matchtype" ) // DynamicTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/dynamic-template.ts#L22-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/dynamic-template.ts#L22-L42 type DynamicTemplate struct { - Mapping Property `json:"mapping,omitempty"` - Match *string `json:"match,omitempty"` - MatchMappingType *string `json:"match_mapping_type,omitempty"` - MatchPattern *matchtype.MatchType `json:"match_pattern,omitempty"` - PathMatch *string `json:"path_match,omitempty"` - PathUnmatch *string `json:"path_unmatch,omitempty"` - Unmatch *string `json:"unmatch,omitempty"` + Mapping Property `json:"mapping,omitempty"` + Match []string `json:"match,omitempty"` + MatchMappingType []string `json:"match_mapping_type,omitempty"` + MatchPattern *matchtype.MatchType `json:"match_pattern,omitempty"` + PathMatch []string `json:"path_match,omitempty"` + PathUnmatch []string `json:"path_unmatch,omitempty"` + Runtime Property `json:"runtime,omitempty"` + Unmatch []string `json:"unmatch,omitempty"` + UnmatchMappingType []string `json:"unmatch_mapping_type,omitempty"` } func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { @@ -85,7 +86,7 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { return err } s.Mapping = *o - case "{dynamic_property}": + case "{dynamic_type}": o := NewDynamicProperty() if err := localDec.Decode(&o); err != nil { return err @@ -175,12 +176,6 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { return err } s.Mapping = *o - case "sparse_vector": - o := NewSparseVectorProperty() - if err := localDec.Decode(&o); err != nil { - return err - } - s.Mapping = *o case "flattened": o := NewFlattenedProperty() if err := localDec.Decode(&o); err != nil { @@ -199,6 +194,18 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { return err } s.Mapping = *o + case "semantic_text": + o := NewSemanticTextProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Mapping = *o + case "sparse_vector": + o := NewSparseVectorProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Mapping = *o case "completion": o := NewCompletionProperty() if err := localDec.Decode(&o); err != nil { @@ -355,6 +362,12 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { return err } s.Mapping = *o + case "icu_collation_keyword": + o := NewIcuCollationProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Mapping = *o default: if err := localDec.Decode(&s.Mapping); err != nil { return err @@ -362,28 +375,36 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { } case "match": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Match", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) + 
rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Match", err) + } + + s.Match = append(s.Match, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Match); err != nil { + return fmt.Errorf("%s | %w", "Match", err) + } } - s.Match = &o case "match_mapping_type": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "MatchMappingType", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "MatchMappingType", err) + } + + s.MatchMappingType = append(s.MatchMappingType, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.MatchMappingType); err != nil { + return fmt.Errorf("%s | %w", "MatchMappingType", err) + } } - s.MatchMappingType = &o case "match_pattern": if err := dec.Decode(&s.MatchPattern); err != nil { @@ -391,40 +412,382 @@ func (s *DynamicTemplate) UnmarshalJSON(data []byte) error { } case "path_match": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "PathMatch", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "PathMatch", err) + } + + s.PathMatch = append(s.PathMatch, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.PathMatch); err != nil { + return fmt.Errorf("%s | %w", "PathMatch", err) + } } - s.PathMatch = &o case "path_unmatch": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "PathUnmatch", err) + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "PathUnmatch", err) + } + + s.PathUnmatch = append(s.PathUnmatch, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.PathUnmatch); err != nil { + return fmt.Errorf("%s | %w", "PathUnmatch", err) + } } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) + + case "runtime": + + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + source := bytes.NewReader(rawMsg) + kind := make(map[string]string, 0) + localDec := json.NewDecoder(source) + localDec.Decode(&kind) + source.Seek(0, io.SeekStart) + if _, ok := kind["type"]; !ok { + kind["type"] = "object" + } + switch kind["type"] { + + case "binary": + o := NewBinaryProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "boolean": + o := NewBooleanProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "{dynamic_type}": + o := NewDynamicProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "join": + o := NewJoinProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime 
= *o + case "keyword": + o := NewKeywordProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "match_only_text": + o := NewMatchOnlyTextProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "percolator": + o := NewPercolatorProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "rank_feature": + o := NewRankFeatureProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "rank_features": + o := NewRankFeaturesProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "search_as_you_type": + o := NewSearchAsYouTypeProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "text": + o := NewTextProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "version": + o := NewVersionProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "wildcard": + o := NewWildcardProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "date_nanos": + o := NewDateNanosProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "date": + o := NewDateProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "aggregate_metric_double": + o := NewAggregateMetricDoubleProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "dense_vector": + o := NewDenseVectorProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "flattened": + o := NewFlattenedProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "nested": + o := NewNestedProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "object": + o := NewObjectProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "semantic_text": + o := NewSemanticTextProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "sparse_vector": + o := NewSparseVectorProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "completion": + o := NewCompletionProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "constant_keyword": + o := NewConstantKeywordProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "alias": + o := NewFieldAliasProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "histogram": + o := NewHistogramProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "ip": + o := NewIpProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "murmur3": + o := NewMurmur3HashProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "token_count": + o := NewTokenCountProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "geo_point": + o := NewGeoPointProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "geo_shape": + o := NewGeoShapeProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = 
*o + case "point": + o := NewPointProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "shape": + o := NewShapeProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "byte": + o := NewByteNumberProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "double": + o := NewDoubleNumberProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "float": + o := NewFloatNumberProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "half_float": + o := NewHalfFloatNumberProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "integer": + o := NewIntegerNumberProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "long": + o := NewLongNumberProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "scaled_float": + o := NewScaledFloatNumberProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "short": + o := NewShortNumberProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "unsigned_long": + o := NewUnsignedLongNumberProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "date_range": + o := NewDateRangeProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "double_range": + o := NewDoubleRangeProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "float_range": + o := NewFloatRangeProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "integer_range": + o := NewIntegerRangeProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "ip_range": + o := NewIpRangeProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "long_range": + o := NewLongRangeProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + case "icu_collation_keyword": + o := NewIcuCollationProperty() + if err := localDec.Decode(&o); err != nil { + return err + } + s.Runtime = *o + default: + if err := localDec.Decode(&s.Runtime); err != nil { + return err + } } - s.PathUnmatch = &o case "unmatch": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Unmatch", err) + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Unmatch", err) + } + + s.Unmatch = append(s.Unmatch, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Unmatch); err != nil { + return fmt.Errorf("%s | %w", "Unmatch", err) + } } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) + + case "unmatch_mapping_type": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "UnmatchMappingType", err) + } + + s.UnmatchMappingType = append(s.UnmatchMappingType, *o) + } else { + if err := 
json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.UnmatchMappingType); err != nil { + return fmt.Errorf("%s | %w", "UnmatchMappingType", err) + } } - s.Unmatch = &o } } diff --git a/typedapi/types/edgengramtokenfilter.go b/typedapi/types/edgengramtokenfilter.go index 0690d8d7cd..b95a68d465 100644 --- a/typedapi/types/edgengramtokenfilter.go +++ b/typedapi/types/edgengramtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // EdgeNGramTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L79-L85 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L79-L85 type EdgeNGramTokenFilter struct { MaxGram *int `json:"max_gram,omitempty"` MinGram *int `json:"min_gram,omitempty"` @@ -60,7 +60,7 @@ func (s *EdgeNGramTokenFilter) UnmarshalJSON(data []byte) error { case "max_gram": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *EdgeNGramTokenFilter) UnmarshalJSON(data []byte) error { case "min_gram": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/edgengramtokenizer.go b/typedapi/types/edgengramtokenizer.go index 218ab8f340..bd00325a6a 100644 --- a/typedapi/types/edgengramtokenizer.go +++ b/typedapi/types/edgengramtokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // EdgeNGramTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L31-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L31-L37 type EdgeNGramTokenizer struct { CustomTokenChars *string `json:"custom_token_chars,omitempty"` MaxGram int `json:"max_gram"` @@ -72,7 +72,7 @@ func (s *EdgeNGramTokenizer) UnmarshalJSON(data []byte) error { case "max_gram": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *EdgeNGramTokenizer) UnmarshalJSON(data []byte) error { case "min_gram": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/elasticsearchversioninfo.go b/typedapi/types/elasticsearchversioninfo.go index 9bd70ff67a..95ecad220b 100644 --- a/typedapi/types/elasticsearchversioninfo.go +++ b/typedapi/types/elasticsearchversioninfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ElasticsearchVersionInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Base.ts#L54-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Base.ts#L54-L64 type ElasticsearchVersionInfo struct { BuildDate DateTime `json:"build_date"` BuildFlavor string `json:"build_flavor"` @@ -89,7 +89,7 @@ func (s *ElasticsearchVersionInfo) UnmarshalJSON(data []byte) error { s.BuildHash = o case "build_snapshot": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/elasticsearchversionmininfo.go b/typedapi/types/elasticsearchversionmininfo.go index 6a4a59f6a5..b5d98a642c 100644 --- a/typedapi/types/elasticsearchversionmininfo.go +++ b/typedapi/types/elasticsearchversionmininfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ElasticsearchVersionMinInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Base.ts#L66-L74 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Base.ts#L66-L74 type ElasticsearchVersionMinInfo struct { BuildFlavor string `json:"build_flavor"` Int string `json:"number"` diff --git a/typedapi/types/elisiontokenfilter.go b/typedapi/types/elisiontokenfilter.go index e5a690b969..c9c443c91d 100644 --- a/typedapi/types/elisiontokenfilter.go +++ b/typedapi/types/elisiontokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ElisionTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L187-L192 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L189-L194 type ElisionTokenFilter struct { Articles []string `json:"articles,omitempty"` ArticlesCase Stringifiedboolean `json:"articles_case,omitempty"` diff --git a/typedapi/types/email.go b/typedapi/types/email.go index d228548e35..c391dfb196 100644 --- a/typedapi/types/email.go +++ b/typedapi/types/email.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // Email type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L238-L250 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L238-L250 type Email struct { Attachments map[string]EmailAttachmentContainer `json:"attachments,omitempty"` Bcc []string `json:"bcc,omitempty"` diff --git a/typedapi/types/emailaction.go b/typedapi/types/emailaction.go index c10f00f71f..0d284fb83d 100644 --- a/typedapi/types/emailaction.go +++ b/typedapi/types/emailaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // EmailAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L252-L252 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L252-L252 type EmailAction struct { Attachments map[string]EmailAttachmentContainer `json:"attachments,omitempty"` Bcc []string `json:"bcc,omitempty"` diff --git a/typedapi/types/emailattachmentcontainer.go b/typedapi/types/emailattachmentcontainer.go index 6e2b03f883..b44584cde8 100644 --- a/typedapi/types/emailattachmentcontainer.go +++ b/typedapi/types/emailattachmentcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // EmailAttachmentContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L211-L216 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L211-L216 type EmailAttachmentContainer struct { Data *DataEmailAttachment `json:"data,omitempty"` Http *HttpEmailAttachment `json:"http,omitempty"` diff --git a/typedapi/types/emailbody.go b/typedapi/types/emailbody.go index 21f449f056..89209421dc 100644 --- a/typedapi/types/emailbody.go +++ b/typedapi/types/emailbody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // EmailBody type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L192-L195 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L192-L195 type EmailBody struct { Html *string `json:"html,omitempty"` Text *string `json:"text,omitempty"` diff --git a/typedapi/types/emailresult.go b/typedapi/types/emailresult.go index 31e2562bc0..e21a9b95f6 100644 --- a/typedapi/types/emailresult.go +++ b/typedapi/types/emailresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // EmailResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L205-L209 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L205-L209 type EmailResult struct { Account *string `json:"account,omitempty"` Message Email `json:"message"` diff --git a/typedapi/types/emptyobject.go b/typedapi/types/emptyobject.go index c9683b69a4..0e22761089 100644 --- a/typedapi/types/emptyobject.go +++ b/typedapi/types/emptyobject.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // EmptyObject type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L160-L161 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L160-L161 type EmptyObject struct { } diff --git a/typedapi/types/enrichpolicy.go b/typedapi/types/enrichpolicy.go index 2e58bf92c0..2b435d1ce2 100644 --- a/typedapi/types/enrichpolicy.go +++ b/typedapi/types/enrichpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // EnrichPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/_types/Policy.ts#L34-L41 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/_types/Policy.ts#L34-L41 type EnrichPolicy struct { ElasticsearchVersion *string `json:"elasticsearch_version,omitempty"` EnrichFields []string `json:"enrich_fields"` diff --git a/typedapi/types/enrichprocessor.go b/typedapi/types/enrichprocessor.go index 336a7bd2b7..7138bcd48b 100644 --- a/typedapi/types/enrichprocessor.go +++ b/typedapi/types/enrichprocessor.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // EnrichProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L607-L646 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L607-L646 type EnrichProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -121,7 +121,7 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -135,7 +135,7 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { case "max_matches": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -170,7 +170,7 @@ func (s *EnrichProcessor) UnmarshalJSON(data []byte) error { } case "override": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/ensemble.go b/typedapi/types/ensemble.go index ccf8858c1d..43945a7dcd 100644 --- a/typedapi/types/ensemble.go +++ b/typedapi/types/ensemble.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Ensemble type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/types.ts#L93-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/types.ts#L93-L99 type Ensemble struct { AggregateOutput *AggregateOutput `json:"aggregate_output,omitempty"` ClassificationLabels []string `json:"classification_labels,omitempty"` diff --git a/typedapi/types/enums/accesstokengranttype/accesstokengranttype.go b/typedapi/types/enums/accesstokengranttype/accesstokengranttype.go index 63bb2c5fab..5bafdba628 100644 --- a/typedapi/types/enums/accesstokengranttype/accesstokengranttype.go +++ b/typedapi/types/enums/accesstokengranttype/accesstokengranttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package accesstokengranttype package accesstokengranttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_token/types.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_token/types.ts#L23-L28 type AccessTokenGrantType struct { Name string } diff --git a/typedapi/types/enums/acknowledgementoptions/acknowledgementoptions.go b/typedapi/types/enums/acknowledgementoptions/acknowledgementoptions.go index bdd624af2f..72d2c084c0 100644 --- a/typedapi/types/enums/acknowledgementoptions/acknowledgementoptions.go +++ b/typedapi/types/enums/acknowledgementoptions/acknowledgementoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package acknowledgementoptions package acknowledgementoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Action.ts#L109-L113 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Action.ts#L109-L113 type AcknowledgementOptions struct { Name string } diff --git a/typedapi/types/enums/actionexecutionmode/actionexecutionmode.go b/typedapi/types/enums/actionexecutionmode/actionexecutionmode.go index 97d402d6d8..cbd5f874fd 100644 --- a/typedapi/types/enums/actionexecutionmode/actionexecutionmode.go +++ b/typedapi/types/enums/actionexecutionmode/actionexecutionmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package actionexecutionmode package actionexecutionmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Action.ts#L73-L94 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Action.ts#L73-L94 type ActionExecutionMode struct { Name string } diff --git a/typedapi/types/enums/actionstatusoptions/actionstatusoptions.go b/typedapi/types/enums/actionstatusoptions/actionstatusoptions.go index 0aef723597..73c68fbd1f 100644 --- a/typedapi/types/enums/actionstatusoptions/actionstatusoptions.go +++ b/typedapi/types/enums/actionstatusoptions/actionstatusoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package actionstatusoptions package actionstatusoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Action.ts#L102-L107 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Action.ts#L102-L107 type ActionStatusOptions struct { Name string } diff --git a/typedapi/types/enums/actiontype/actiontype.go b/typedapi/types/enums/actiontype/actiontype.go index 73ca239ccc..b380c74ad1 100644 --- a/typedapi/types/enums/actiontype/actiontype.go +++ b/typedapi/types/enums/actiontype/actiontype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package actiontype package actiontype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Action.ts#L64-L71 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Action.ts#L64-L71 type ActionType struct { Name string } diff --git a/typedapi/types/enums/allocationexplaindecision/allocationexplaindecision.go b/typedapi/types/enums/allocationexplaindecision/allocationexplaindecision.go index 9328e355a6..302313d8e3 100644 --- a/typedapi/types/enums/allocationexplaindecision/allocationexplaindecision.go +++ b/typedapi/types/enums/allocationexplaindecision/allocationexplaindecision.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package allocationexplaindecision package allocationexplaindecision import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/types.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/types.ts#L32-L37 type AllocationExplainDecision struct { Name string } diff --git a/typedapi/types/enums/apikeygranttype/apikeygranttype.go b/typedapi/types/enums/apikeygranttype/apikeygranttype.go index 41382a5b46..cd93c57587 100644 --- a/typedapi/types/enums/apikeygranttype/apikeygranttype.go +++ b/typedapi/types/enums/apikeygranttype/apikeygranttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package apikeygranttype package apikeygranttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/grant_api_key/types.ts#L48-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/grant_api_key/types.ts#L48-L51 type ApiKeyGrantType struct { Name string } diff --git a/typedapi/types/enums/appliesto/appliesto.go b/typedapi/types/enums/appliesto/appliesto.go index e3399b8b9a..d6570f6c61 100644 --- a/typedapi/types/enums/appliesto/appliesto.go +++ b/typedapi/types/enums/appliesto/appliesto.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package appliesto package appliesto import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Rule.ts#L67-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Rule.ts#L67-L72 type AppliesTo struct { Name string } diff --git a/typedapi/types/enums/boundaryscanner/boundaryscanner.go b/typedapi/types/enums/boundaryscanner/boundaryscanner.go index 8c499f1db6..3a5dda9a6e 100644 --- a/typedapi/types/enums/boundaryscanner/boundaryscanner.go +++ b/typedapi/types/enums/boundaryscanner/boundaryscanner.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package boundaryscanner package boundaryscanner import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/highlighting.ts#L27-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/highlighting.ts#L27-L46 type BoundaryScanner struct { Name string } diff --git a/typedapi/types/enums/bytes/bytes.go b/typedapi/types/enums/bytes/bytes.go index 5ef4ae4dde..c9397c2128 100644 --- a/typedapi/types/enums/bytes/bytes.go +++ b/typedapi/types/enums/bytes/bytes.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package bytes package bytes import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L169-L181 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L169-L181 type Bytes struct { Name string } diff --git a/typedapi/types/enums/calendarinterval/calendarinterval.go b/typedapi/types/enums/calendarinterval/calendarinterval.go index 3be3200eca..f53f9b5c51 100644 --- a/typedapi/types/enums/calendarinterval/calendarinterval.go +++ b/typedapi/types/enums/calendarinterval/calendarinterval.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package calendarinterval package calendarinterval import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L249-L266 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L251-L268 type CalendarInterval struct { Name string } diff --git a/typedapi/types/enums/cardinalityexecutionmode/cardinalityexecutionmode.go b/typedapi/types/enums/cardinalityexecutionmode/cardinalityexecutionmode.go index 9f80e984a1..30ead2b82d 100644 --- a/typedapi/types/enums/cardinalityexecutionmode/cardinalityexecutionmode.go +++ b/typedapi/types/enums/cardinalityexecutionmode/cardinalityexecutionmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package cardinalityexecutionmode package cardinalityexecutionmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L64-L85 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L64-L85 type CardinalityExecutionMode struct { Name string } diff --git a/typedapi/types/enums/catanomalydetectorcolumn/catanomalydetectorcolumn.go b/typedapi/types/enums/catanomalydetectorcolumn/catanomalydetectorcolumn.go index 1a74b492d2..ca91d5cd0f 100644 --- a/typedapi/types/enums/catanomalydetectorcolumn/catanomalydetectorcolumn.go +++ b/typedapi/types/enums/catanomalydetectorcolumn/catanomalydetectorcolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package catanomalydetectorcolumn package catanomalydetectorcolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/_types/CatBase.ts#L32-L401 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/_types/CatBase.ts#L32-L401 type CatAnomalyDetectorColumn struct { Name string } diff --git a/typedapi/types/enums/catdatafeedcolumn/catdatafeedcolumn.go b/typedapi/types/enums/catdatafeedcolumn/catdatafeedcolumn.go index d506bc9a19..7912f70de9 100644 --- a/typedapi/types/enums/catdatafeedcolumn/catdatafeedcolumn.go +++ b/typedapi/types/enums/catdatafeedcolumn/catdatafeedcolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package catdatafeedcolumn package catdatafeedcolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/_types/CatBase.ts#L405-L471 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/_types/CatBase.ts#L405-L471 type CatDatafeedColumn struct { Name string } diff --git a/typedapi/types/enums/catdfacolumn/catdfacolumn.go b/typedapi/types/enums/catdfacolumn/catdfacolumn.go index c34f2cc07a..d2a1a28863 100644 --- a/typedapi/types/enums/catdfacolumn/catdfacolumn.go +++ b/typedapi/types/enums/catdfacolumn/catdfacolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package catdfacolumn package catdfacolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/_types/CatBase.ts#L472-L557 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/_types/CatBase.ts#L472-L557 type CatDfaColumn struct { Name string } diff --git a/typedapi/types/enums/categorizationstatus/categorizationstatus.go b/typedapi/types/enums/categorizationstatus/categorizationstatus.go index 01b76611e2..76098a6ebe 100644 --- a/typedapi/types/enums/categorizationstatus/categorizationstatus.go +++ b/typedapi/types/enums/categorizationstatus/categorizationstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package categorizationstatus package categorizationstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Model.ts#L83-L86 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Model.ts#L83-L86 type CategorizationStatus struct { Name string } diff --git a/typedapi/types/enums/cattrainedmodelscolumn/cattrainedmodelscolumn.go b/typedapi/types/enums/cattrainedmodelscolumn/cattrainedmodelscolumn.go index 0bc812e79c..4ab2faa93f 100644 --- a/typedapi/types/enums/cattrainedmodelscolumn/cattrainedmodelscolumn.go +++ b/typedapi/types/enums/cattrainedmodelscolumn/cattrainedmodelscolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package cattrainedmodelscolumn package cattrainedmodelscolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/_types/CatBase.ts#L561-L635 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/_types/CatBase.ts#L561-L635 type CatTrainedModelsColumn struct { Name string } diff --git a/typedapi/types/enums/cattransformcolumn/cattransformcolumn.go b/typedapi/types/enums/cattransformcolumn/cattransformcolumn.go index 600249e75e..c03754dfc6 100644 --- a/typedapi/types/enums/cattransformcolumn/cattransformcolumn.go +++ b/typedapi/types/enums/cattransformcolumn/cattransformcolumn.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package cattransformcolumn package cattransformcolumn import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/_types/CatBase.ts#L640-L844 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/_types/CatBase.ts#L640-L844 type CatTransformColumn struct { Name string } diff --git a/typedapi/types/enums/childscoremode/childscoremode.go b/typedapi/types/enums/childscoremode/childscoremode.go index 56af561c42..84c3d668f2 100644 --- a/typedapi/types/enums/childscoremode/childscoremode.go +++ b/typedapi/types/enums/childscoremode/childscoremode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package childscoremode package childscoremode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/joining.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/joining.ts#L25-L39 type ChildScoreMode struct { Name string } diff --git a/typedapi/types/enums/chunkingmode/chunkingmode.go b/typedapi/types/enums/chunkingmode/chunkingmode.go index 38f3e492af..02a3597429 100644 --- a/typedapi/types/enums/chunkingmode/chunkingmode.go +++ b/typedapi/types/enums/chunkingmode/chunkingmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package chunkingmode package chunkingmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Datafeed.ts#L233-L237 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Datafeed.ts#L232-L236 type ChunkingMode struct { Name string } diff --git a/typedapi/types/enums/clusterinfotarget/clusterinfotarget.go b/typedapi/types/enums/clusterinfotarget/clusterinfotarget.go index ae34404627..cd6535f4ce 100644 --- a/typedapi/types/enums/clusterinfotarget/clusterinfotarget.go +++ b/typedapi/types/enums/clusterinfotarget/clusterinfotarget.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package clusterinfotarget package clusterinfotarget import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L378-L384 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L381-L387 type ClusterInfoTarget struct { Name string } diff --git a/typedapi/types/enums/clusterprivilege/clusterprivilege.go b/typedapi/types/enums/clusterprivilege/clusterprivilege.go index cf768d66dc..30bbc93842 100644 --- a/typedapi/types/enums/clusterprivilege/clusterprivilege.go +++ b/typedapi/types/enums/clusterprivilege/clusterprivilege.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package clusterprivilege package clusterprivilege import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L41-L80 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L41-L198 type ClusterPrivilege struct { Name string } @@ -35,20 +35,36 @@ var ( Createsnapshot = ClusterPrivilege{"create_snapshot"} + Crossclusterreplication = ClusterPrivilege{"cross_cluster_replication"} + + Crossclustersearch = ClusterPrivilege{"cross_cluster_search"} + + Delegatepki = ClusterPrivilege{"delegate_pki"} + Grantapikey = ClusterPrivilege{"grant_api_key"} Manage = ClusterPrivilege{"manage"} Manageapikey = ClusterPrivilege{"manage_api_key"} + Manageautoscaling = ClusterPrivilege{"manage_autoscaling"} + + Managebehavioralanalytics = ClusterPrivilege{"manage_behavioral_analytics"} + Manageccr = ClusterPrivilege{"manage_ccr"} + Managedataframetransforms = ClusterPrivilege{"manage_data_frame_transforms"} + + Managedatastreamglobalretention = ClusterPrivilege{"manage_data_stream_global_retention"} + Manageenrich = ClusterPrivilege{"manage_enrich"} Manageilm = ClusterPrivilege{"manage_ilm"} Manageindextemplates = ClusterPrivilege{"manage_index_templates"} + Manageinference = ClusterPrivilege{"manage_inference"} + Manageingestpipelines = ClusterPrivilege{"manage_ingest_pipelines"} Managelogstashpipelines = ClusterPrivilege{"manage_logstash_pipelines"} @@ -65,6 +81,12 @@ var ( Managesaml = ClusterPrivilege{"manage_saml"} + Managesearchapplication = ClusterPrivilege{"manage_search_application"} + + Managesearchqueryrules = ClusterPrivilege{"manage_search_query_rules"} + + Managesearchsynonyms = ClusterPrivilege{"manage_search_synonyms"} + Managesecurity = ClusterPrivilege{"manage_security"} Manageserviceaccount = ClusterPrivilege{"manage_service_account"} @@ -81,6 +103,14 @@ var ( Monitor = ClusterPrivilege{"monitor"} + Monitordataframetransforms = ClusterPrivilege{"monitor_data_frame_transforms"} + + Monitordatastreamglobalretention = ClusterPrivilege{"monitor_data_stream_global_retention"} + + Monitorenrich = ClusterPrivilege{"monitor_enrich"} + + Monitorinference = ClusterPrivilege{"monitor_inference"} + Monitorml = ClusterPrivilege{"monitor_ml"} Monitorrollup = ClusterPrivilege{"monitor_rollup"} @@ -93,15 +123,29 @@ var ( Monitorwatcher = ClusterPrivilege{"monitor_watcher"} + None = ClusterPrivilege{"none"} + + Postbehavioralanalyticsevent = ClusterPrivilege{"post_behavioral_analytics_event"} + Readccr = ClusterPrivilege{"read_ccr"} + Readconnectorsecrets = ClusterPrivilege{"read_connector_secrets"} + + Readfleetsecrets = ClusterPrivilege{"read_fleet_secrets"} + Readilm = ClusterPrivilege{"read_ilm"} Readpipeline = ClusterPrivilege{"read_pipeline"} + Readsecurity = ClusterPrivilege{"read_security"} + Readslm = ClusterPrivilege{"read_slm"} Transportclient = ClusterPrivilege{"transport_client"} + + Writeconnectorsecrets = ClusterPrivilege{"write_connector_secrets"} + + Writefleetsecrets = ClusterPrivilege{"write_fleet_secrets"} ) func (c ClusterPrivilege) MarshalText() (text []byte, err error) { @@ -117,20 +161,36 @@ func (c *ClusterPrivilege) UnmarshalText(text []byte) error { *c = Canceltask case 
"create_snapshot": *c = Createsnapshot + case "cross_cluster_replication": + *c = Crossclusterreplication + case "cross_cluster_search": + *c = Crossclustersearch + case "delegate_pki": + *c = Delegatepki case "grant_api_key": *c = Grantapikey case "manage": *c = Manage case "manage_api_key": *c = Manageapikey + case "manage_autoscaling": + *c = Manageautoscaling + case "manage_behavioral_analytics": + *c = Managebehavioralanalytics case "manage_ccr": *c = Manageccr + case "manage_data_frame_transforms": + *c = Managedataframetransforms + case "manage_data_stream_global_retention": + *c = Managedatastreamglobalretention case "manage_enrich": *c = Manageenrich case "manage_ilm": *c = Manageilm case "manage_index_templates": *c = Manageindextemplates + case "manage_inference": + *c = Manageinference case "manage_ingest_pipelines": *c = Manageingestpipelines case "manage_logstash_pipelines": @@ -147,6 +207,12 @@ func (c *ClusterPrivilege) UnmarshalText(text []byte) error { *c = Managerollup case "manage_saml": *c = Managesaml + case "manage_search_application": + *c = Managesearchapplication + case "manage_search_query_rules": + *c = Managesearchqueryrules + case "manage_search_synonyms": + *c = Managesearchsynonyms case "manage_security": *c = Managesecurity case "manage_service_account": @@ -163,6 +229,14 @@ func (c *ClusterPrivilege) UnmarshalText(text []byte) error { *c = Managewatcher case "monitor": *c = Monitor + case "monitor_data_frame_transforms": + *c = Monitordataframetransforms + case "monitor_data_stream_global_retention": + *c = Monitordatastreamglobalretention + case "monitor_enrich": + *c = Monitorenrich + case "monitor_inference": + *c = Monitorinference case "monitor_ml": *c = Monitorml case "monitor_rollup": @@ -175,16 +249,30 @@ func (c *ClusterPrivilege) UnmarshalText(text []byte) error { *c = Monitortransform case "monitor_watcher": *c = Monitorwatcher + case "none": + *c = None + case "post_behavioral_analytics_event": + *c = Postbehavioralanalyticsevent case "read_ccr": *c = Readccr + case "read_connector_secrets": + *c = Readconnectorsecrets + case "read_fleet_secrets": + *c = Readfleetsecrets case "read_ilm": *c = Readilm case "read_pipeline": *c = Readpipeline + case "read_security": + *c = Readsecurity case "read_slm": *c = Readslm case "transport_client": *c = Transportclient + case "write_connector_secrets": + *c = Writeconnectorsecrets + case "write_fleet_secrets": + *c = Writefleetsecrets default: *c = ClusterPrivilege{string(text)} } diff --git a/typedapi/types/enums/clustersearchstatus/clustersearchstatus.go b/typedapi/types/enums/clustersearchstatus/clustersearchstatus.go index b47a5c0bf9..8e418c1cd5 100644 --- a/typedapi/types/enums/clustersearchstatus/clustersearchstatus.go +++ b/typedapi/types/enums/clustersearchstatus/clustersearchstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package clustersearchstatus package clustersearchstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L37-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L37-L43 type ClusterSearchStatus struct { Name string } diff --git a/typedapi/types/enums/combinedfieldsoperator/combinedfieldsoperator.go b/typedapi/types/enums/combinedfieldsoperator/combinedfieldsoperator.go index 7a0848d284..b83cfe9fee 100644 --- a/typedapi/types/enums/combinedfieldsoperator/combinedfieldsoperator.go +++ b/typedapi/types/enums/combinedfieldsoperator/combinedfieldsoperator.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package combinedfieldsoperator package combinedfieldsoperator import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/abstractions.ts#L489-L492 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/abstractions.ts#L509-L512 type CombinedFieldsOperator struct { Name string } diff --git a/typedapi/types/enums/combinedfieldszeroterms/combinedfieldszeroterms.go b/typedapi/types/enums/combinedfieldszeroterms/combinedfieldszeroterms.go index 7b75f95a28..b13cc40a20 100644 --- a/typedapi/types/enums/combinedfieldszeroterms/combinedfieldszeroterms.go +++ b/typedapi/types/enums/combinedfieldszeroterms/combinedfieldszeroterms.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package combinedfieldszeroterms package combinedfieldszeroterms import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/abstractions.ts#L494-L503 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/abstractions.ts#L514-L523 type CombinedFieldsZeroTerms struct { Name string } diff --git a/typedapi/types/enums/conditionop/conditionop.go b/typedapi/types/enums/conditionop/conditionop.go index e064833dad..3f7ccb4b62 100644 --- a/typedapi/types/enums/conditionop/conditionop.go +++ b/typedapi/types/enums/conditionop/conditionop.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package conditionop package conditionop import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Conditions.ts#L38-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Conditions.ts#L41-L48 type ConditionOp struct { Name string } diff --git a/typedapi/types/enums/conditionoperator/conditionoperator.go b/typedapi/types/enums/conditionoperator/conditionoperator.go index b1cb4907aa..15e4b27965 100644 --- a/typedapi/types/enums/conditionoperator/conditionoperator.go +++ b/typedapi/types/enums/conditionoperator/conditionoperator.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package conditionoperator package conditionoperator import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Rule.ts#L74-L79 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Rule.ts#L74-L79 type ConditionOperator struct { Name string } diff --git a/typedapi/types/enums/conditiontype/conditiontype.go b/typedapi/types/enums/conditiontype/conditiontype.go index 1c7c7b4762..07c86250cb 100644 --- a/typedapi/types/enums/conditiontype/conditiontype.go +++ b/typedapi/types/enums/conditiontype/conditiontype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package conditiontype package conditiontype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Conditions.ts#L61-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Conditions.ts#L64-L70 type ConditionType struct { Name string } diff --git a/typedapi/types/enums/conflicts/conflicts.go b/typedapi/types/enums/conflicts/conflicts.go index bd1c17339b..8b2d683af7 100644 --- a/typedapi/types/enums/conflicts/conflicts.go +++ b/typedapi/types/enums/conflicts/conflicts.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package conflicts package conflicts import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L183-L192 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L183-L192 type Conflicts struct { Name string } diff --git a/typedapi/types/enums/connectionscheme/connectionscheme.go b/typedapi/types/enums/connectionscheme/connectionscheme.go index 92ef10aeb5..b558c64f21 100644 --- a/typedapi/types/enums/connectionscheme/connectionscheme.go +++ b/typedapi/types/enums/connectionscheme/connectionscheme.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package connectionscheme package connectionscheme import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L39-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L39-L42 type ConnectionScheme struct { Name string } diff --git a/typedapi/types/enums/converttype/converttype.go b/typedapi/types/enums/converttype/converttype.go index 26a032b81f..7835dfaafc 100644 --- a/typedapi/types/enums/converttype/converttype.go +++ b/typedapi/types/enums/converttype/converttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package converttype package converttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L435-L443 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L435-L443 type ConvertType struct { Name string } diff --git a/typedapi/types/enums/dataattachmentformat/dataattachmentformat.go b/typedapi/types/enums/dataattachmentformat/dataattachmentformat.go index 882f18a723..707937bc1b 100644 --- a/typedapi/types/enums/dataattachmentformat/dataattachmentformat.go +++ b/typedapi/types/enums/dataattachmentformat/dataattachmentformat.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package dataattachmentformat package dataattachmentformat import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L187-L190 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L187-L190 type DataAttachmentFormat struct { Name string } diff --git a/typedapi/types/enums/datafeedstate/datafeedstate.go b/typedapi/types/enums/datafeedstate/datafeedstate.go index 21dd2f4f22..68db6fb312 100644 --- a/typedapi/types/enums/datafeedstate/datafeedstate.go +++ b/typedapi/types/enums/datafeedstate/datafeedstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package datafeedstate package datafeedstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Datafeed.ts#L133-L138 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Datafeed.ts#L132-L137 type DatafeedState struct { Name string } diff --git a/typedapi/types/enums/dataframestate/dataframestate.go b/typedapi/types/enums/dataframestate/dataframestate.go index c2a5f67e51..551e075b1b 100644 --- a/typedapi/types/enums/dataframestate/dataframestate.go +++ b/typedapi/types/enums/dataframestate/dataframestate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package dataframestate package dataframestate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Dataframe.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Dataframe.ts#L20-L26 type DataframeState struct { Name string } diff --git a/typedapi/types/enums/day/day.go b/typedapi/types/enums/day/day.go index 480141e0c5..c20d9d75b9 100644 --- a/typedapi/types/enums/day/day.go +++ b/typedapi/types/enums/day/day.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package day package day import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Schedule.ts#L37-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Schedule.ts#L37-L45 type Day struct { Name string } diff --git a/typedapi/types/enums/decision/decision.go b/typedapi/types/enums/decision/decision.go index 284ff2843d..fcd8936066 100644 --- a/typedapi/types/enums/decision/decision.go +++ b/typedapi/types/enums/decision/decision.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package decision package decision import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/types.ts#L86-L95 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/types.ts#L86-L95 type Decision struct { Name string } diff --git a/typedapi/types/enums/delimitedpayloadencoding/delimitedpayloadencoding.go b/typedapi/types/enums/delimitedpayloadencoding/delimitedpayloadencoding.go index 85e2eaea1e..938178bd94 100644 --- a/typedapi/types/enums/delimitedpayloadencoding/delimitedpayloadencoding.go +++ b/typedapi/types/enums/delimitedpayloadencoding/delimitedpayloadencoding.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package delimitedpayloadencoding package delimitedpayloadencoding import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L62-L66 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L62-L66 type DelimitedPayloadEncoding struct { Name string } diff --git a/typedapi/types/enums/deploymentallocationstate/deploymentallocationstate.go b/typedapi/types/enums/deploymentallocationstate/deploymentallocationstate.go index 6167bf2df2..e42e972779 100644 --- a/typedapi/types/enums/deploymentallocationstate/deploymentallocationstate.go +++ b/typedapi/types/enums/deploymentallocationstate/deploymentallocationstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package deploymentallocationstate package deploymentallocationstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L289-L302 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L289-L302 type DeploymentAllocationState struct { Name string } diff --git a/typedapi/types/enums/deploymentassignmentstate/deploymentassignmentstate.go b/typedapi/types/enums/deploymentassignmentstate/deploymentassignmentstate.go index aa0ea56dde..6e51dc9ed9 100644 --- a/typedapi/types/enums/deploymentassignmentstate/deploymentassignmentstate.go +++ b/typedapi/types/enums/deploymentassignmentstate/deploymentassignmentstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package deploymentassignmentstate package deploymentassignmentstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L304-L309 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L304-L309 type DeploymentAssignmentState struct { Name string } diff --git a/typedapi/types/enums/deploymentstate/deploymentstate.go b/typedapi/types/enums/deploymentstate/deploymentstate.go index c07da6c147..5092115905 100644 --- a/typedapi/types/enums/deploymentstate/deploymentstate.go +++ b/typedapi/types/enums/deploymentstate/deploymentstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package deploymentstate package deploymentstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L274-L287 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L274-L287 type DeploymentState struct { Name string } diff --git a/typedapi/types/enums/deprecationlevel/deprecationlevel.go b/typedapi/types/enums/deprecationlevel/deprecationlevel.go index 9487f426d9..5d53378af7 100644 --- a/typedapi/types/enums/deprecationlevel/deprecationlevel.go +++ b/typedapi/types/enums/deprecationlevel/deprecationlevel.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package deprecationlevel package deprecationlevel import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/migration/deprecations/types.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/migration/deprecations/types.ts#L20-L27 type DeprecationLevel struct { Name string } diff --git a/typedapi/types/enums/dfiindependencemeasure/dfiindependencemeasure.go b/typedapi/types/enums/dfiindependencemeasure/dfiindependencemeasure.go index 807bc2e96d..466104661d 100644 --- a/typedapi/types/enums/dfiindependencemeasure/dfiindependencemeasure.go +++ b/typedapi/types/enums/dfiindependencemeasure/dfiindependencemeasure.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package dfiindependencemeasure package dfiindependencemeasure import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Similarity.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Similarity.ts#L20-L24 type DFIIndependenceMeasure struct { Name string } diff --git a/typedapi/types/enums/dfraftereffect/dfraftereffect.go b/typedapi/types/enums/dfraftereffect/dfraftereffect.go index 6455bd5cc6..f92125ba3d 100644 --- a/typedapi/types/enums/dfraftereffect/dfraftereffect.go +++ b/typedapi/types/enums/dfraftereffect/dfraftereffect.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package dfraftereffect package dfraftereffect import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Similarity.ts#L26-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Similarity.ts#L26-L30 type DFRAfterEffect struct { Name string } diff --git a/typedapi/types/enums/dfrbasicmodel/dfrbasicmodel.go b/typedapi/types/enums/dfrbasicmodel/dfrbasicmodel.go index 1341a99e7f..139efe2f4f 100644 --- a/typedapi/types/enums/dfrbasicmodel/dfrbasicmodel.go +++ b/typedapi/types/enums/dfrbasicmodel/dfrbasicmodel.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757
+// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed
 
 // Package dfrbasicmodel
 package dfrbasicmodel
 
 import "strings"
 
-// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Similarity.ts#L32-L40
+// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Similarity.ts#L32-L40
 type DFRBasicModel struct {
 	Name string
 }
diff --git a/typedapi/types/enums/distanceunit/distanceunit.go b/typedapi/types/enums/distanceunit/distanceunit.go
index fc2d73f06d..f13ed62ecf 100644
--- a/typedapi/types/enums/distanceunit/distanceunit.go
+++ b/typedapi/types/enums/distanceunit/distanceunit.go
@@ -16,36 +16,36 @@
 // under the License.
 
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757
+// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed
 
 // Package distanceunit
 package distanceunit
 
 import "strings"
 
-// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L30-L40
+// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L30-L40
 type DistanceUnit struct {
 	Name string
 }
 
 var (
-	In = DistanceUnit{"in"}
+	Inches = DistanceUnit{"in"}
 
-	Ft = DistanceUnit{"ft"}
+	Feet = DistanceUnit{"ft"}
 
-	Yd = DistanceUnit{"yd"}
+	Yards = DistanceUnit{"yd"}
 
-	Mi = DistanceUnit{"mi"}
+	Miles = DistanceUnit{"mi"}
 
-	Nmi = DistanceUnit{"nmi"}
+	Nauticmiles = DistanceUnit{"nmi"}
 
-	Km = DistanceUnit{"km"}
+	Kilometers = DistanceUnit{"km"}
 
-	M = DistanceUnit{"m"}
+	Meters = DistanceUnit{"m"}
 
-	Cm = DistanceUnit{"cm"}
+	Centimeters = DistanceUnit{"cm"}
 
-	Mm = DistanceUnit{"mm"}
+	Millimeters = DistanceUnit{"mm"}
 )
 
 func (d DistanceUnit) MarshalText() (text []byte, err error) {
@@ -56,23 +56,23 @@ func (d *DistanceUnit) UnmarshalText(text []byte) error {
 	switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") {
 
 	case "in":
-		*d = In
+		*d = Inches
 	case "ft":
-		*d = Ft
+		*d = Feet
 	case "yd":
-		*d = Yd
+		*d = Yards
 	case "mi":
-		*d = Mi
+		*d = Miles
 	case "nmi":
-		*d = Nmi
+		*d = Nauticmiles
 	case "km":
-		*d = Km
+		*d = Kilometers
 	case "m":
-		*d = M
+		*d = Meters
 	case "cm":
-		*d = Cm
+		*d = Centimeters
 	case "mm":
-		*d = Mm
+		*d = Millimeters
 	default:
 		*d = DistanceUnit{string(text)}
 	}
diff --git a/typedapi/types/enums/dynamicmapping/dynamicmapping.go b/typedapi/types/enums/dynamicmapping/dynamicmapping.go
index 18eb98d902..c9514c05a2 100644
--- a/typedapi/types/enums/dynamicmapping/dynamicmapping.go
+++ b/typedapi/types/enums/dynamicmapping/dynamicmapping.go
@@ -16,14 +16,14 @@
 // under the License.
 
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package dynamicmapping package dynamicmapping import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/dynamic-template.ts#L37-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/dynamic-template.ts#L49-L58 type DynamicMapping struct { Name string } diff --git a/typedapi/types/enums/edgengramside/edgengramside.go b/typedapi/types/enums/edgengramside/edgengramside.go index 77dc5b7299..b3d1703418 100644 --- a/typedapi/types/enums/edgengramside/edgengramside.go +++ b/typedapi/types/enums/edgengramside/edgengramside.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package edgengramside package edgengramside import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L74-L77 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L74-L77 type EdgeNGramSide struct { Name string } diff --git a/typedapi/types/enums/emailpriority/emailpriority.go b/typedapi/types/enums/emailpriority/emailpriority.go index bee3306cfa..ab99d34543 100644 --- a/typedapi/types/enums/emailpriority/emailpriority.go +++ b/typedapi/types/enums/emailpriority/emailpriority.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package emailpriority package emailpriority import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L197-L203 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L197-L203 type EmailPriority struct { Name string } diff --git a/typedapi/types/enums/enrichpolicyphase/enrichpolicyphase.go b/typedapi/types/enums/enrichpolicyphase/enrichpolicyphase.go index f03cb7611e..2b576873c9 100644 --- a/typedapi/types/enums/enrichpolicyphase/enrichpolicyphase.go +++ b/typedapi/types/enums/enrichpolicyphase/enrichpolicyphase.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package enrichpolicyphase package enrichpolicyphase import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/execute_policy/types.ts#L24-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/execute_policy/types.ts#L24-L29 type EnrichPolicyPhase struct { Name string } diff --git a/typedapi/types/enums/excludefrequent/excludefrequent.go b/typedapi/types/enums/excludefrequent/excludefrequent.go index 04c315c619..f97a057ec9 100644 --- a/typedapi/types/enums/excludefrequent/excludefrequent.go +++ b/typedapi/types/enums/excludefrequent/excludefrequent.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package excludefrequent package excludefrequent import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Detector.ts#L127-L132 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Detector.ts#L127-L132 type ExcludeFrequent struct { Name string } diff --git a/typedapi/types/enums/executionphase/executionphase.go b/typedapi/types/enums/executionphase/executionphase.go index b0f57ee6f0..922f552cd4 100644 --- a/typedapi/types/enums/executionphase/executionphase.go +++ b/typedapi/types/enums/executionphase/executionphase.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package executionphase package executionphase import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Execution.ts#L49-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Execution.ts#L49-L58 type ExecutionPhase struct { Name string } diff --git a/typedapi/types/enums/executionstatus/executionstatus.go b/typedapi/types/enums/executionstatus/executionstatus.go index 38f931a537..f91a8f97c3 100644 --- a/typedapi/types/enums/executionstatus/executionstatus.go +++ b/typedapi/types/enums/executionstatus/executionstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package executionstatus package executionstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Execution.ts#L38-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Execution.ts#L38-L47 type ExecutionStatus struct { Name string } diff --git a/typedapi/types/enums/expandwildcard/expandwildcard.go b/typedapi/types/enums/expandwildcard/expandwildcard.go index 0ae3e0ff14..5e1a6379a4 100644 --- a/typedapi/types/enums/expandwildcard/expandwildcard.go +++ b/typedapi/types/enums/expandwildcard/expandwildcard.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package expandwildcard package expandwildcard import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L201-L215 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L201-L215 type ExpandWildcard struct { Name string } diff --git a/typedapi/types/enums/feature/feature.go b/typedapi/types/enums/feature/feature.go index a7aab5c077..8151c8fae1 100644 --- a/typedapi/types/enums/feature/feature.go +++ b/typedapi/types/enums/feature/feature.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package feature package feature import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get/IndicesGetRequest.ts#L90-L94 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get/IndicesGetRequest.ts#L90-L94 type Feature struct { Name string } diff --git a/typedapi/types/enums/fieldsortnumerictype/fieldsortnumerictype.go b/typedapi/types/enums/fieldsortnumerictype/fieldsortnumerictype.go index 4b3b9d6f36..e6e49e5bae 100644 --- a/typedapi/types/enums/fieldsortnumerictype/fieldsortnumerictype.go +++ b/typedapi/types/enums/fieldsortnumerictype/fieldsortnumerictype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757
+// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed
 
 // Package fieldsortnumerictype
 package fieldsortnumerictype
 
 import "strings"
 
-// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/sort.ts#L37-L42
+// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/sort.ts#L37-L42
 type FieldSortNumericType struct {
 	Name string
 }
diff --git a/typedapi/types/enums/fieldtype/fieldtype.go b/typedapi/types/enums/fieldtype/fieldtype.go
index 98838278ce..831a3b0349 100644
--- a/typedapi/types/enums/fieldtype/fieldtype.go
+++ b/typedapi/types/enums/fieldtype/fieldtype.go
@@ -16,14 +16,14 @@
 // under the License.
 
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757
+// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed
 
 // Package fieldtype
 package fieldtype
 
 import "strings"
 
-// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/Property.ts#L160-L204
+// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/Property.ts#L166-L213
 type FieldType struct {
 	Name string
 }
@@ -57,6 +57,8 @@ var (
 
 	Object = FieldType{"object"}
 
+	Version = FieldType{"version"}
+
 	Murmur3 = FieldType{"murmur3"}
 
 	Tokencount = FieldType{"token_count"}
@@ -111,9 +113,13 @@ var (
 
 	Densevector = FieldType{"dense_vector"}
 
+	Semantictext = FieldType{"semantic_text"}
+
 	Sparsevector = FieldType{"sparse_vector"}
 
 	Matchonlytext = FieldType{"match_only_text"}
+
+	Icucollationkeyword = FieldType{"icu_collation_keyword"}
 )
 
 func (f FieldType) MarshalText() (text []byte, err error) {
@@ -151,6 +157,8 @@ func (f *FieldType) UnmarshalText(text []byte) error {
 		*f = Nested
 	case "object":
 		*f = Object
+	case "version":
+		*f = Version
 	case "murmur3":
 		*f = Murmur3
 	case "token_count":
@@ -205,10 +213,14 @@ func (f *FieldType) UnmarshalText(text []byte) error {
 		*f = Aggregatemetricdouble
 	case "dense_vector":
 		*f = Densevector
+	case "semantic_text":
+		*f = Semantictext
 	case "sparse_vector":
 		*f = Sparsevector
 	case "match_only_text":
 		*f = Matchonlytext
+	case "icu_collation_keyword":
+		*f = Icucollationkeyword
 	default:
 		*f = FieldType{string(text)}
 	}
diff --git a/typedapi/types/enums/fieldvaluefactormodifier/fieldvaluefactormodifier.go b/typedapi/types/enums/fieldvaluefactormodifier/fieldvaluefactormodifier.go
index 3cebcaf95b..92ad496a3a 100644
--- a/typedapi/types/enums/fieldvaluefactormodifier/fieldvaluefactormodifier.go
+++ b/typedapi/types/enums/fieldvaluefactormodifier/fieldvaluefactormodifier.go
@@ -16,14 +16,14 @@
 // under the License.
 
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package fieldvaluefactormodifier package fieldvaluefactormodifier import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L298-L341 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L307-L350 type FieldValueFactorModifier struct { Name string } diff --git a/typedapi/types/enums/filtertype/filtertype.go b/typedapi/types/enums/filtertype/filtertype.go index 9e50af5537..983d66d137 100644 --- a/typedapi/types/enums/filtertype/filtertype.go +++ b/typedapi/types/enums/filtertype/filtertype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package filtertype package filtertype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Filter.ts#L43-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Filter.ts#L43-L46 type FilterType struct { Name string } diff --git a/typedapi/types/enums/followerindexstatus/followerindexstatus.go b/typedapi/types/enums/followerindexstatus/followerindexstatus.go index 6dff72e3fc..2ae51e2e04 100644 --- a/typedapi/types/enums/followerindexstatus/followerindexstatus.go +++ b/typedapi/types/enums/followerindexstatus/followerindexstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package followerindexstatus package followerindexstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/follow_info/types.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/follow_info/types.ts#L30-L33 type FollowerIndexStatus struct { Name string } diff --git a/typedapi/types/enums/functionboostmode/functionboostmode.go b/typedapi/types/enums/functionboostmode/functionboostmode.go index 2dadfc0676..ffa39f35de 100644 --- a/typedapi/types/enums/functionboostmode/functionboostmode.go +++ b/typedapi/types/enums/functionboostmode/functionboostmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package functionboostmode package functionboostmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L270-L296 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L279-L305 type FunctionBoostMode struct { Name string } diff --git a/typedapi/types/enums/functionscoremode/functionscoremode.go b/typedapi/types/enums/functionscoremode/functionscoremode.go index fe4a8ec6b7..3673bc3e22 100644 --- a/typedapi/types/enums/functionscoremode/functionscoremode.go +++ b/typedapi/types/enums/functionscoremode/functionscoremode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package functionscoremode package functionscoremode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L243-L268 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L252-L277 type FunctionScoreMode struct { Name string } diff --git a/typedapi/types/enums/gappolicy/gappolicy.go b/typedapi/types/enums/gappolicy/gappolicy.go index 316644286e..58804463a3 100644 --- a/typedapi/types/enums/gappolicy/gappolicy.go +++ b/typedapi/types/enums/gappolicy/gappolicy.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package gappolicy package gappolicy import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L61-L76 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L61-L76 type GapPolicy struct { Name string } diff --git a/typedapi/types/enums/geodistancetype/geodistancetype.go b/typedapi/types/enums/geodistancetype/geodistancetype.go index 95ac14a40c..e1bbf11543 100644 --- a/typedapi/types/enums/geodistancetype/geodistancetype.go +++ b/typedapi/types/enums/geodistancetype/geodistancetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package geodistancetype package geodistancetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L42-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L42-L51 type GeoDistanceType struct { Name string } diff --git a/typedapi/types/enums/geoexecution/geoexecution.go b/typedapi/types/enums/geoexecution/geoexecution.go index 8eecd09c7a..26c168b7fb 100644 --- a/typedapi/types/enums/geoexecution/geoexecution.go +++ b/typedapi/types/enums/geoexecution/geoexecution.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package geoexecution package geoexecution import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/geo.ts#L52-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/geo.ts#L55-L58 type GeoExecution struct { Name string } diff --git a/typedapi/types/enums/geoorientation/geoorientation.go b/typedapi/types/enums/geoorientation/geoorientation.go index 368a742883..470e7ef91d 100644 --- a/typedapi/types/enums/geoorientation/geoorientation.go +++ b/typedapi/types/enums/geoorientation/geoorientation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package geoorientation package geoorientation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/geo.ts#L34-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/geo.ts#L34-L39 type GeoOrientation struct { Name string } diff --git a/typedapi/types/enums/geoshaperelation/geoshaperelation.go b/typedapi/types/enums/geoshaperelation/geoshaperelation.go index fecbbbca8f..50d5b142cc 100644 --- a/typedapi/types/enums/geoshaperelation/geoshaperelation.go +++ b/typedapi/types/enums/geoshaperelation/geoshaperelation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package geoshaperelation package geoshaperelation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L64-L82 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L64-L82 type GeoShapeRelation struct { Name string } diff --git a/typedapi/types/enums/geostrategy/geostrategy.go b/typedapi/types/enums/geostrategy/geostrategy.go index aecb521cdc..6f06c2a99e 100644 --- a/typedapi/types/enums/geostrategy/geostrategy.go +++ b/typedapi/types/enums/geostrategy/geostrategy.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package geostrategy package geostrategy import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/geo.ts#L56-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/geo.ts#L56-L59 type GeoStrategy struct { Name string } diff --git a/typedapi/types/enums/geovalidationmethod/geovalidationmethod.go b/typedapi/types/enums/geovalidationmethod/geovalidationmethod.go index 053b44302e..3bb2f0a903 100644 --- a/typedapi/types/enums/geovalidationmethod/geovalidationmethod.go +++ b/typedapi/types/enums/geovalidationmethod/geovalidationmethod.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package geovalidationmethod package geovalidationmethod import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/geo.ts#L147-L157 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/geo.ts#L159-L169 type GeoValidationMethod struct { Name string } diff --git a/typedapi/types/enums/granttype/granttype.go b/typedapi/types/enums/granttype/granttype.go index 56c84c66d9..ae523057df 100644 --- a/typedapi/types/enums/granttype/granttype.go +++ b/typedapi/types/enums/granttype/granttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package granttype package granttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/GrantType.ts#L20-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/GrantType.ts#L20-L29 type GrantType struct { Name string } diff --git a/typedapi/types/enums/gridaggregationtype/gridaggregationtype.go b/typedapi/types/enums/gridaggregationtype/gridaggregationtype.go index 7bb5eeb1da..c32b1c077e 100644 --- a/typedapi/types/enums/gridaggregationtype/gridaggregationtype.go +++ b/typedapi/types/enums/gridaggregationtype/gridaggregationtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package gridaggregationtype package gridaggregationtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search_mvt/_types/GridType.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search_mvt/_types/GridType.ts#L30-L33 type GridAggregationType struct { Name string } diff --git a/typedapi/types/enums/gridtype/gridtype.go b/typedapi/types/enums/gridtype/gridtype.go index d16d42ddb8..2dadfe7aae 100644 --- a/typedapi/types/enums/gridtype/gridtype.go +++ b/typedapi/types/enums/gridtype/gridtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package gridtype package gridtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search_mvt/_types/GridType.ts#L20-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search_mvt/_types/GridType.ts#L20-L28 type GridType struct { Name string } diff --git a/typedapi/types/enums/groupby/groupby.go b/typedapi/types/enums/groupby/groupby.go index bd20aabf24..657cd80062 100644 --- a/typedapi/types/enums/groupby/groupby.go +++ b/typedapi/types/enums/groupby/groupby.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package groupby package groupby import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/tasks/_types/GroupBy.ts#L20-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/tasks/_types/GroupBy.ts#L20-L27 type GroupBy struct { Name string } diff --git a/typedapi/types/enums/healthstatus/healthstatus.go b/typedapi/types/enums/healthstatus/healthstatus.go index fb1b8a14b0..11c9383b59 100644 --- a/typedapi/types/enums/healthstatus/healthstatus.go +++ b/typedapi/types/enums/healthstatus/healthstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package healthstatus package healthstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L219-L239 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L219-L239 type HealthStatus struct { Name string } diff --git a/typedapi/types/enums/highlighterencoder/highlighterencoder.go b/typedapi/types/enums/highlighterencoder/highlighterencoder.go index d5801ed05a..cf2c42e393 100644 --- a/typedapi/types/enums/highlighterencoder/highlighterencoder.go +++ b/typedapi/types/enums/highlighterencoder/highlighterencoder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package highlighterencoder package highlighterencoder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/highlighting.ts#L158-L161 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/highlighting.ts#L158-L161 type HighlighterEncoder struct { Name string } diff --git a/typedapi/types/enums/highlighterfragmenter/highlighterfragmenter.go b/typedapi/types/enums/highlighterfragmenter/highlighterfragmenter.go index 5fdebb5cc5..3eeb04c140 100644 --- a/typedapi/types/enums/highlighterfragmenter/highlighterfragmenter.go +++ b/typedapi/types/enums/highlighterfragmenter/highlighterfragmenter.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package highlighterfragmenter package highlighterfragmenter import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/highlighting.ts#L163-L166 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/highlighting.ts#L163-L166 type HighlighterFragmenter struct { Name string } diff --git a/typedapi/types/enums/highlighterorder/highlighterorder.go b/typedapi/types/enums/highlighterorder/highlighterorder.go index 138dcaa311..92c99b2bd9 100644 --- a/typedapi/types/enums/highlighterorder/highlighterorder.go +++ b/typedapi/types/enums/highlighterorder/highlighterorder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package highlighterorder package highlighterorder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/highlighting.ts#L168-L170 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/highlighting.ts#L168-L170 type HighlighterOrder struct { Name string } diff --git a/typedapi/types/enums/highlightertagsschema/highlightertagsschema.go b/typedapi/types/enums/highlightertagsschema/highlightertagsschema.go index 7a96f45b1e..805bf399cb 100644 --- a/typedapi/types/enums/highlightertagsschema/highlightertagsschema.go +++ b/typedapi/types/enums/highlightertagsschema/highlightertagsschema.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package highlightertagsschema package highlightertagsschema import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/highlighting.ts#L172-L174 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/highlighting.ts#L172-L174 type HighlighterTagsSchema struct { Name string } diff --git a/typedapi/types/enums/highlightertype/highlightertype.go b/typedapi/types/enums/highlightertype/highlightertype.go index a64bc85c9e..9747185085 100644 --- a/typedapi/types/enums/highlightertype/highlightertype.go +++ b/typedapi/types/enums/highlightertype/highlightertype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757
+// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed
 
 // Package highlightertype
 package highlightertype
 
 import "strings"
 
-// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/highlighting.ts#L176-L191
+// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/highlighting.ts#L176-L191
 type HighlighterType struct {
 	Name string
 }
@@ -31,7 +31,7 @@ type HighlighterType struct {
 var (
 	Plain = HighlighterType{"plain"}
 
-	Fvh = HighlighterType{"fvh"}
+	Fastvector = HighlighterType{"fvh"}
 
 	Unified = HighlighterType{"unified"}
 )
@@ -46,7 +46,7 @@ func (h *HighlighterType) UnmarshalText(text []byte) error {
 	case "plain":
 		*h = Plain
 	case "fvh":
-		*h = Fvh
+		*h = Fastvector
 	case "unified":
 		*h = Unified
 	default:
diff --git a/typedapi/types/enums/holtwinterstype/holtwinterstype.go b/typedapi/types/enums/holtwinterstype/holtwinterstype.go
index 6d076d8e18..65bbd19681 100644
--- a/typedapi/types/enums/holtwinterstype/holtwinterstype.go
+++ b/typedapi/types/enums/holtwinterstype/holtwinterstype.go
@@ -16,22 +16,22 @@
 // under the License.
 
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757
+// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed
 
 // Package holtwinterstype
 package holtwinterstype
 
 import "strings"
 
-// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L283-L286
+// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L283-L286
 type HoltWintersType struct {
 	Name string
 }
 
 var (
-	Add = HoltWintersType{"add"}
+	Additive = HoltWintersType{"add"}
 
-	Mult = HoltWintersType{"mult"}
+	Multiplicative = HoltWintersType{"mult"}
 )
 
 func (h HoltWintersType) MarshalText() (text []byte, err error) {
@@ -42,9 +42,9 @@ func (h *HoltWintersType) UnmarshalText(text []byte) error {
 	switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") {
 
 	case "add":
-		*h = Add
+		*h = Additive
 	case "mult":
-		*h = Mult
+		*h = Multiplicative
 	default:
 		*h = HoltWintersType{string(text)}
 	}
diff --git a/typedapi/types/enums/httpinputmethod/httpinputmethod.go b/typedapi/types/enums/httpinputmethod/httpinputmethod.go
index 7069f846ce..935d8ba9c2 100644
--- a/typedapi/types/enums/httpinputmethod/httpinputmethod.go
+++ b/typedapi/types/enums/httpinputmethod/httpinputmethod.go
@@ -16,14 +16,14 @@
 // under the License.
 
 // Code generated from the elasticsearch-specification DO NOT EDIT.
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package httpinputmethod package httpinputmethod import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L59-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L59-L65 type HttpInputMethod struct { Name string } diff --git a/typedapi/types/enums/ibdistribution/ibdistribution.go b/typedapi/types/enums/ibdistribution/ibdistribution.go index ea7ac982ef..1e41f9e862 100644 --- a/typedapi/types/enums/ibdistribution/ibdistribution.go +++ b/typedapi/types/enums/ibdistribution/ibdistribution.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package ibdistribution package ibdistribution import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Similarity.ts#L42-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Similarity.ts#L42-L45 type IBDistribution struct { Name string } diff --git a/typedapi/types/enums/iblambda/iblambda.go b/typedapi/types/enums/iblambda/iblambda.go index 36e0e5bd9f..2f00077cb5 100644 --- a/typedapi/types/enums/iblambda/iblambda.go +++ b/typedapi/types/enums/iblambda/iblambda.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package iblambda package iblambda import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Similarity.ts#L47-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Similarity.ts#L47-L50 type IBLambda struct { Name string } diff --git a/typedapi/types/enums/icucollationalternate/icucollationalternate.go b/typedapi/types/enums/icucollationalternate/icucollationalternate.go index 65f837183e..c4a5d45829 100644 --- a/typedapi/types/enums/icucollationalternate/icucollationalternate.go +++ b/typedapi/types/enums/icucollationalternate/icucollationalternate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package icucollationalternate package icucollationalternate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L89-L92 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L89-L92 type IcuCollationAlternate struct { Name string } diff --git a/typedapi/types/enums/icucollationcasefirst/icucollationcasefirst.go b/typedapi/types/enums/icucollationcasefirst/icucollationcasefirst.go index f96fb8ff30..48a140b22d 100644 --- a/typedapi/types/enums/icucollationcasefirst/icucollationcasefirst.go +++ b/typedapi/types/enums/icucollationcasefirst/icucollationcasefirst.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package icucollationcasefirst package icucollationcasefirst import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L94-L97 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L94-L97 type IcuCollationCaseFirst struct { Name string } diff --git a/typedapi/types/enums/icucollationdecomposition/icucollationdecomposition.go b/typedapi/types/enums/icucollationdecomposition/icucollationdecomposition.go index 934f002da4..e739dd8621 100644 --- a/typedapi/types/enums/icucollationdecomposition/icucollationdecomposition.go +++ b/typedapi/types/enums/icucollationdecomposition/icucollationdecomposition.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package icucollationdecomposition package icucollationdecomposition import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L99-L102 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L99-L102 type IcuCollationDecomposition struct { Name string } diff --git a/typedapi/types/enums/icucollationstrength/icucollationstrength.go b/typedapi/types/enums/icucollationstrength/icucollationstrength.go index 3459c7c03b..8f1bd1bf5b 100644 --- a/typedapi/types/enums/icucollationstrength/icucollationstrength.go +++ b/typedapi/types/enums/icucollationstrength/icucollationstrength.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package icucollationstrength package icucollationstrength import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L104-L110 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L104-L110 type IcuCollationStrength struct { Name string } diff --git a/typedapi/types/enums/icunormalizationmode/icunormalizationmode.go b/typedapi/types/enums/icunormalizationmode/icunormalizationmode.go index 74c97619dc..c83be2b21a 100644 --- a/typedapi/types/enums/icunormalizationmode/icunormalizationmode.go +++ b/typedapi/types/enums/icunormalizationmode/icunormalizationmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package icunormalizationmode package icunormalizationmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L78-L81 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L78-L81 type IcuNormalizationMode struct { Name string } diff --git a/typedapi/types/enums/icunormalizationtype/icunormalizationtype.go b/typedapi/types/enums/icunormalizationtype/icunormalizationtype.go index 13e24ea23e..42e10f2aaf 100644 --- a/typedapi/types/enums/icunormalizationtype/icunormalizationtype.go +++ b/typedapi/types/enums/icunormalizationtype/icunormalizationtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package icunormalizationtype package icunormalizationtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L83-L87 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L83-L87 type IcuNormalizationType struct { Name string } diff --git a/typedapi/types/enums/icutransformdirection/icutransformdirection.go b/typedapi/types/enums/icutransformdirection/icutransformdirection.go index 21df83fde6..0c42c58d44 100644 --- a/typedapi/types/enums/icutransformdirection/icutransformdirection.go +++ b/typedapi/types/enums/icutransformdirection/icutransformdirection.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package icutransformdirection package icutransformdirection import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L73-L76 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L73-L76 type IcuTransformDirection struct { Name string } diff --git a/typedapi/types/enums/impactarea/impactarea.go b/typedapi/types/enums/impactarea/impactarea.go index 3b17cb4404..a1adbfd7c9 100644 --- a/typedapi/types/enums/impactarea/impactarea.go +++ b/typedapi/types/enums/impactarea/impactarea.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package impactarea package impactarea import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L72-L77 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L72-L77 type ImpactArea struct { Name string } diff --git a/typedapi/types/enums/include/include.go b/typedapi/types/enums/include/include.go index ee7f792940..a35d4aebe1 100644 --- a/typedapi/types/enums/include/include.go +++ b/typedapi/types/enums/include/include.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package include package include import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Include.ts#L20-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Include.ts#L20-L47 type Include struct { Name string } diff --git a/typedapi/types/enums/indexcheckonstartup/indexcheckonstartup.go b/typedapi/types/enums/indexcheckonstartup/indexcheckonstartup.go index a3ed1249f2..afcc5c3fa7 100644 --- a/typedapi/types/enums/indexcheckonstartup/indexcheckonstartup.go +++ b/typedapi/types/enums/indexcheckonstartup/indexcheckonstartup.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package indexcheckonstartup package indexcheckonstartup import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L260-L267 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L262-L269 type IndexCheckOnStartup struct { Name string } diff --git a/typedapi/types/enums/indexingjobstate/indexingjobstate.go b/typedapi/types/enums/indexingjobstate/indexingjobstate.go index 22bb6b40b0..df280c09ef 100644 --- a/typedapi/types/enums/indexingjobstate/indexingjobstate.go +++ b/typedapi/types/enums/indexingjobstate/indexingjobstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package indexingjobstate package indexingjobstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_jobs/types.ts#L66-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_jobs/types.ts#L66-L72 type IndexingJobState struct { Name string } diff --git a/typedapi/types/enums/indexmetadatastate/indexmetadatastate.go b/typedapi/types/enums/indexmetadatastate/indexmetadatastate.go index 64988e53a3..e6ed75891d 100644 --- a/typedapi/types/enums/indexmetadatastate/indexmetadatastate.go +++ b/typedapi/types/enums/indexmetadatastate/indexmetadatastate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package indexmetadatastate package indexmetadatastate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L225-L232 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L225-L232 type IndexMetadataState struct { Name string } diff --git a/typedapi/types/enums/indexoptions/indexoptions.go b/typedapi/types/enums/indexoptions/indexoptions.go index d04b663dfd..4ad9e3c024 100644 --- a/typedapi/types/enums/indexoptions/indexoptions.go +++ b/typedapi/types/enums/indexoptions/indexoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package indexoptions package indexoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L243-L248 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L257-L262 type IndexOptions struct { Name string } diff --git a/typedapi/types/enums/indexprivilege/indexprivilege.go b/typedapi/types/enums/indexprivilege/indexprivilege.go index c3288a4cae..0ede9e248f 100644 --- a/typedapi/types/enums/indexprivilege/indexprivilege.go +++ b/typedapi/types/enums/indexprivilege/indexprivilege.go @@ -16,21 +16,19 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package indexprivilege package indexprivilege import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L166-L187 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L285-L327 type IndexPrivilege struct { Name string } var ( - None = IndexPrivilege{"none"} - All = IndexPrivilege{"all"} Autoconfigure = IndexPrivilege{"auto_configure"} @@ -41,6 +39,10 @@ var ( Createindex = IndexPrivilege{"create_index"} + Crossclusterreplication = IndexPrivilege{"cross_cluster_replication"} + + Crossclusterreplicationinternal = IndexPrivilege{"cross_cluster_replication_internal"} + Delete = IndexPrivilege{"delete"} Deleteindex = IndexPrivilege{"delete_index"} @@ -51,6 +53,8 @@ var ( Manage = IndexPrivilege{"manage"} + Managedatastreamlifecycle = IndexPrivilege{"manage_data_stream_lifecycle"} + Managefollowindex = IndexPrivilege{"manage_follow_index"} Manageilm = IndexPrivilege{"manage_ilm"} @@ -59,6 +63,8 @@ var ( Monitor = IndexPrivilege{"monitor"} + None = IndexPrivilege{"none"} + Read = IndexPrivilege{"read"} Readcrosscluster = IndexPrivilege{"read_cross_cluster"} @@ -75,8 +81,6 @@ func (i IndexPrivilege) MarshalText() (text []byte, err error) { func (i *IndexPrivilege) UnmarshalText(text []byte) error { switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { - case "none": - *i = None case "all": *i = All case "auto_configure": @@ -87,6 +91,10 @@ func (i *IndexPrivilege) UnmarshalText(text []byte) error { *i = Createdoc case "create_index": *i = Createindex + case "cross_cluster_replication": + *i = Crossclusterreplication + case "cross_cluster_replication_internal": + *i = Crossclusterreplicationinternal case "delete": *i = Delete case "delete_index": @@ -97,6 +105,8 @@ func (i *IndexPrivilege) UnmarshalText(text []byte) error { *i = Maintenance case "manage": *i = Manage + case "manage_data_stream_lifecycle": + *i = Managedatastreamlifecycle case "manage_follow_index": *i = Managefollowindex case "manage_ilm": @@ -105,6 +115,8 @@ func (i *IndexPrivilege) UnmarshalText(text []byte) error { *i = Manageleaderindex case "monitor": *i = Monitor + case "none": + *i = None case "read": *i = Read 
case "read_cross_cluster": diff --git a/typedapi/types/enums/indexroutingallocationoptions/indexroutingallocationoptions.go b/typedapi/types/enums/indexroutingallocationoptions/indexroutingallocationoptions.go index 495cb82c86..fdf0a23f35 100644 --- a/typedapi/types/enums/indexroutingallocationoptions/indexroutingallocationoptions.go +++ b/typedapi/types/enums/indexroutingallocationoptions/indexroutingallocationoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package indexroutingallocationoptions package indexroutingallocationoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexRouting.ts#L38-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexRouting.ts#L38-L43 type IndexRoutingAllocationOptions struct { Name string } diff --git a/typedapi/types/enums/indexroutingrebalanceoptions/indexroutingrebalanceoptions.go b/typedapi/types/enums/indexroutingrebalanceoptions/indexroutingrebalanceoptions.go index c802453431..21910e7511 100644 --- a/typedapi/types/enums/indexroutingrebalanceoptions/indexroutingrebalanceoptions.go +++ b/typedapi/types/enums/indexroutingrebalanceoptions/indexroutingrebalanceoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package indexroutingrebalanceoptions package indexroutingrebalanceoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexRouting.ts#L45-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexRouting.ts#L45-L50 type IndexRoutingRebalanceOptions struct { Name string } diff --git a/typedapi/types/enums/indicatorhealthstatus/indicatorhealthstatus.go b/typedapi/types/enums/indicatorhealthstatus/indicatorhealthstatus.go index 1258de3138..8049c484d3 100644 --- a/typedapi/types/enums/indicatorhealthstatus/indicatorhealthstatus.go +++ b/typedapi/types/enums/indicatorhealthstatus/indicatorhealthstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package indicatorhealthstatus package indicatorhealthstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L25-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L25-L30 type IndicatorHealthStatus struct { Name string } diff --git a/typedapi/types/enums/indicesblockoptions/indicesblockoptions.go b/typedapi/types/enums/indicesblockoptions/indicesblockoptions.go index b01147ee91..56031498dd 100644 --- a/typedapi/types/enums/indicesblockoptions/indicesblockoptions.go +++ b/typedapi/types/enums/indicesblockoptions/indicesblockoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package indicesblockoptions package indicesblockoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/add_block/IndicesAddBlockRequest.ts#L43-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/add_block/IndicesAddBlockRequest.ts#L43-L48 type IndicesBlockOptions struct { Name string } diff --git a/typedapi/types/enums/inputtype/inputtype.go b/typedapi/types/enums/inputtype/inputtype.go index b82876a7b5..4bbec8a9b2 100644 --- a/typedapi/types/enums/inputtype/inputtype.go +++ b/typedapi/types/enums/inputtype/inputtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package inputtype package inputtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L100-L104 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L100-L104 type InputType struct { Name string } diff --git a/typedapi/types/enums/jobblockedreason/jobblockedreason.go b/typedapi/types/enums/jobblockedreason/jobblockedreason.go index 9a573e6013..fb87552d9b 100644 --- a/typedapi/types/enums/jobblockedreason/jobblockedreason.go +++ b/typedapi/types/enums/jobblockedreason/jobblockedreason.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package jobblockedreason package jobblockedreason import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Job.ts#L397-L401 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Job.ts#L397-L401 type JobBlockedReason struct { Name string } diff --git a/typedapi/types/enums/jobstate/jobstate.go b/typedapi/types/enums/jobstate/jobstate.go index e6b669ac8c..fbc8cc9485 100644 --- a/typedapi/types/enums/jobstate/jobstate.go +++ b/typedapi/types/enums/jobstate/jobstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package jobstate package jobstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Job.ts#L36-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Job.ts#L36-L52 type JobState struct { Name string } diff --git a/typedapi/types/enums/jsonprocessorconflictstrategy/jsonprocessorconflictstrategy.go b/typedapi/types/enums/jsonprocessorconflictstrategy/jsonprocessorconflictstrategy.go index 95c058b21a..395327cfaa 100644 --- a/typedapi/types/enums/jsonprocessorconflictstrategy/jsonprocessorconflictstrategy.go +++ b/typedapi/types/enums/jsonprocessorconflictstrategy/jsonprocessorconflictstrategy.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package jsonprocessorconflictstrategy package jsonprocessorconflictstrategy import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L849-L854 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L849-L854 type JsonProcessorConflictStrategy struct { Name string } diff --git a/typedapi/types/enums/keeptypesmode/keeptypesmode.go b/typedapi/types/enums/keeptypesmode/keeptypesmode.go index e156923091..f789048a9e 100644 --- a/typedapi/types/enums/keeptypesmode/keeptypesmode.go +++ b/typedapi/types/enums/keeptypesmode/keeptypesmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package keeptypesmode package keeptypesmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L213-L216 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L215-L218 type KeepTypesMode struct { Name string } diff --git a/typedapi/types/enums/kuromojitokenizationmode/kuromojitokenizationmode.go b/typedapi/types/enums/kuromojitokenizationmode/kuromojitokenizationmode.go index bc7639617f..32f20e5b69 100644 --- a/typedapi/types/enums/kuromojitokenizationmode/kuromojitokenizationmode.go +++ b/typedapi/types/enums/kuromojitokenizationmode/kuromojitokenizationmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package kuromojitokenizationmode package kuromojitokenizationmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/kuromoji-plugin.ts#L52-L56 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/kuromoji-plugin.ts#L52-L56 type KuromojiTokenizationMode struct { Name string } diff --git a/typedapi/types/enums/language/language.go b/typedapi/types/enums/language/language.go index b31da5a7a4..385ceefbfb 100644 --- a/typedapi/types/enums/language/language.go +++ b/typedapi/types/enums/language/language.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package language package language import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/languages.ts#L20-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/languages.ts#L20-L55 type Language struct { Name string } diff --git a/typedapi/types/enums/level/level.go b/typedapi/types/enums/level/level.go index 39c820df04..83451f91d3 100644 --- a/typedapi/types/enums/level/level.go +++ b/typedapi/types/enums/level/level.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package level package level import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L249-L253 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L249-L253 type Level struct { Name string } diff --git a/typedapi/types/enums/licensestatus/licensestatus.go b/typedapi/types/enums/licensestatus/licensestatus.go index 07339dc57d..6d710ec2b5 100644 --- a/typedapi/types/enums/licensestatus/licensestatus.go +++ b/typedapi/types/enums/licensestatus/licensestatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package licensestatus package licensestatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/_types/License.ts#L35-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/_types/License.ts#L35-L40 type LicenseStatus struct { Name string } diff --git a/typedapi/types/enums/licensetype/licensetype.go b/typedapi/types/enums/licensetype/licensetype.go index 6b33c75132..920ca6151d 100644 --- a/typedapi/types/enums/licensetype/licensetype.go +++ b/typedapi/types/enums/licensetype/licensetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package licensetype package licensetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/_types/License.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/_types/License.ts#L23-L33 type LicenseType struct { Name string } diff --git a/typedapi/types/enums/lifecycleoperationmode/lifecycleoperationmode.go b/typedapi/types/enums/lifecycleoperationmode/lifecycleoperationmode.go index fab3e7b74a..67a4b778ca 100644 --- a/typedapi/types/enums/lifecycleoperationmode/lifecycleoperationmode.go +++ b/typedapi/types/enums/lifecycleoperationmode/lifecycleoperationmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package lifecycleoperationmode package lifecycleoperationmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Lifecycle.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Lifecycle.ts#L20-L24 type LifecycleOperationMode struct { Name string } diff --git a/typedapi/types/enums/managedby/managedby.go b/typedapi/types/enums/managedby/managedby.go index ae1deefe48..a3162e6037 100644 --- a/typedapi/types/enums/managedby/managedby.go +++ b/typedapi/types/enums/managedby/managedby.go @@ -16,22 +16,22 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package managedby package managedby import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/DataStream.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/DataStream.ts#L32-L37 type ManagedBy struct { Name string } var ( - IndexLifecycleManagement = ManagedBy{"Index Lifecycle Management"} + Ilm = ManagedBy{"Index Lifecycle Management"} - DataStreamLifecycle = ManagedBy{"Data stream lifecycle"} + Datastream = ManagedBy{"Data stream lifecycle"} Unmanaged = ManagedBy{"Unmanaged"} ) @@ -44,9 +44,9 @@ func (m *ManagedBy) UnmarshalText(text []byte) error { switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "Index Lifecycle Management": - *m = IndexLifecycleManagement + *m = Ilm case "Data stream lifecycle": - *m = DataStreamLifecycle + *m = Datastream case "Unmanaged": *m = Unmanaged default: diff --git a/typedapi/types/enums/matchtype/matchtype.go b/typedapi/types/enums/matchtype/matchtype.go index ab79b15c64..ed53dabc3c 100644 --- a/typedapi/types/enums/matchtype/matchtype.go +++ b/typedapi/types/enums/matchtype/matchtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package matchtype package matchtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/dynamic-template.ts#L32-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/dynamic-template.ts#L44-L47 type MatchType struct { Name string } diff --git a/typedapi/types/enums/memorystatus/memorystatus.go b/typedapi/types/enums/memorystatus/memorystatus.go index fd24b5cee0..f5004578be 100644 --- a/typedapi/types/enums/memorystatus/memorystatus.go +++ b/typedapi/types/enums/memorystatus/memorystatus.go @@ -16,14 +16,14 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package memorystatus package memorystatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Model.ts#L88-L92 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Model.ts#L88-L92 type MemoryStatus struct { Name string } diff --git a/typedapi/types/enums/metric/metric.go b/typedapi/types/enums/metric/metric.go index 7fffefa0a0..95b4c34151 100644 --- a/typedapi/types/enums/metric/metric.go +++ b/typedapi/types/enums/metric/metric.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package metric package metric import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/_types/Metric.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/_types/Metric.ts#L22-L28 type Metric struct { Name string } diff --git a/typedapi/types/enums/migrationstatus/migrationstatus.go b/typedapi/types/enums/migrationstatus/migrationstatus.go index f55723e4f0..aaaaec3e97 100644 --- a/typedapi/types/enums/migrationstatus/migrationstatus.go +++ b/typedapi/types/enums/migrationstatus/migrationstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package migrationstatus package migrationstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L30-L35 type MigrationStatus struct { Name string } diff --git a/typedapi/types/enums/minimuminterval/minimuminterval.go b/typedapi/types/enums/minimuminterval/minimuminterval.go index 77dffe4125..f8c96a9fac 100644 --- a/typedapi/types/enums/minimuminterval/minimuminterval.go +++ b/typedapi/types/enums/minimuminterval/minimuminterval.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package minimuminterval package minimuminterval import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L102-L109 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L104-L111 type MinimumInterval struct { Name string } diff --git a/typedapi/types/enums/missingorder/missingorder.go b/typedapi/types/enums/missingorder/missingorder.go index e4720af21a..1bd91676a5 100644 --- a/typedapi/types/enums/missingorder/missingorder.go +++ b/typedapi/types/enums/missingorder/missingorder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package missingorder package missingorder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/AggregationContainer.ts#L518-L522 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/AggregationContainer.ts#L517-L521 type MissingOrder struct { Name string } diff --git a/typedapi/types/enums/month/month.go b/typedapi/types/enums/month/month.go index f99e02b5d2..5fd95b806b 100644 --- a/typedapi/types/enums/month/month.go +++ b/typedapi/types/enums/month/month.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package month package month import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Schedule.ts#L65-L78 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Schedule.ts#L65-L78 type Month struct { Name string } diff --git a/typedapi/types/enums/multivaluemode/multivaluemode.go b/typedapi/types/enums/multivaluemode/multivaluemode.go index 07fc27e8c2..21815de219 100644 --- a/typedapi/types/enums/multivaluemode/multivaluemode.go +++ b/typedapi/types/enums/multivaluemode/multivaluemode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package multivaluemode package multivaluemode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L343-L360 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L352-L369 type MultiValueMode struct { Name string } diff --git a/typedapi/types/enums/noderole/noderole.go b/typedapi/types/enums/noderole/noderole.go index bcb67e141e..be50d95f3b 100644 --- a/typedapi/types/enums/noderole/noderole.go +++ b/typedapi/types/enums/noderole/noderole.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package noderole package noderole import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Node.ts#L77-L95 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Node.ts#L77-L95 type NodeRole struct { Name string } diff --git a/typedapi/types/enums/noridecompoundmode/noridecompoundmode.go b/typedapi/types/enums/noridecompoundmode/noridecompoundmode.go index f1cd969640..819dc842db 100644 --- a/typedapi/types/enums/noridecompoundmode/noridecompoundmode.go +++ b/typedapi/types/enums/noridecompoundmode/noridecompoundmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package noridecompoundmode package noridecompoundmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L75-L79 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L75-L79 type NoriDecompoundMode struct { Name string } diff --git a/typedapi/types/enums/normalization/normalization.go b/typedapi/types/enums/normalization/normalization.go index bf455a6e1a..43810222c4 100644 --- a/typedapi/types/enums/normalization/normalization.go +++ b/typedapi/types/enums/normalization/normalization.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package normalization package normalization import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Similarity.ts#L52-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Similarity.ts#L52-L58 type Normalization struct { Name string } diff --git a/typedapi/types/enums/normalizemethod/normalizemethod.go b/typedapi/types/enums/normalizemethod/normalizemethod.go index ac04e2e4c3..ec7f17abbf 100644 --- a/typedapi/types/enums/normalizemethod/normalizemethod.go +++ b/typedapi/types/enums/normalizemethod/normalizemethod.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package normalizemethod package normalizemethod import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L326-L352 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L326-L352 type NormalizeMethod struct { Name string } @@ -37,7 +37,7 @@ var ( Mean = NormalizeMethod{"mean"} - ZScore = NormalizeMethod{"z-score"} + Zscore = NormalizeMethod{"z-score"} Softmax = NormalizeMethod{"softmax"} ) @@ -58,7 +58,7 @@ func (n *NormalizeMethod) UnmarshalText(text []byte) error { case "mean": *n = Mean case "z-score": - *n = ZScore + *n = Zscore case "softmax": *n = Softmax default: diff --git a/typedapi/types/enums/numericfielddataformat/numericfielddataformat.go b/typedapi/types/enums/numericfielddataformat/numericfielddataformat.go index 1c2e17050e..566f9f4ba4 100644 --- a/typedapi/types/enums/numericfielddataformat/numericfielddataformat.go +++ b/typedapi/types/enums/numericfielddataformat/numericfielddataformat.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package numericfielddataformat package numericfielddataformat import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/NumericFielddataFormat.ts#L20-L23 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/NumericFielddataFormat.ts#L20-L23 type NumericFielddataFormat struct { Name string } diff --git a/typedapi/types/enums/onscripterror/onscripterror.go b/typedapi/types/enums/onscripterror/onscripterror.go index 0f311fb56c..a2a7773d5b 100644 --- a/typedapi/types/enums/onscripterror/onscripterror.go +++ b/typedapi/types/enums/onscripterror/onscripterror.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package onscripterror package onscripterror import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L129-L132 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L137-L140 type OnScriptError struct { Name string } diff --git a/typedapi/types/enums/operationtype/operationtype.go b/typedapi/types/enums/operationtype/operationtype.go index b8b874672b..40b60fd4e9 100644 --- a/typedapi/types/enums/operationtype/operationtype.go +++ b/typedapi/types/enums/operationtype/operationtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package operationtype package operationtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/bulk/types.ts#L83-L88 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/bulk/types.ts#L83-L88 type OperationType struct { Name string } diff --git a/typedapi/types/enums/operator/operator.go b/typedapi/types/enums/operator/operator.go index da9768488c..da5f90cdb8 100644 --- a/typedapi/types/enums/operator/operator.go +++ b/typedapi/types/enums/operator/operator.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package operator package operator import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/Operator.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/Operator.ts#L22-L27 type Operator struct { Name string } diff --git a/typedapi/types/enums/optype/optype.go b/typedapi/types/enums/optype/optype.go index b13014b702..8d857bc663 100644 --- a/typedapi/types/enums/optype/optype.go +++ b/typedapi/types/enums/optype/optype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package optype package optype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L255-L264 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L255-L264 type OpType struct { Name string } diff --git a/typedapi/types/enums/pagerdutycontexttype/pagerdutycontexttype.go b/typedapi/types/enums/pagerdutycontexttype/pagerdutycontexttype.go index d45997dd50..8502233c8b 100644 --- a/typedapi/types/enums/pagerdutycontexttype/pagerdutycontexttype.go +++ b/typedapi/types/enums/pagerdutycontexttype/pagerdutycontexttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package pagerdutycontexttype package pagerdutycontexttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L67-L70 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L67-L70 type PagerDutyContextType struct { Name string } diff --git a/typedapi/types/enums/pagerdutyeventtype/pagerdutyeventtype.go b/typedapi/types/enums/pagerdutyeventtype/pagerdutyeventtype.go index 8be321a260..82ebd01a8b 100644 --- a/typedapi/types/enums/pagerdutyeventtype/pagerdutyeventtype.go +++ b/typedapi/types/enums/pagerdutyeventtype/pagerdutyeventtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package pagerdutyeventtype package pagerdutyeventtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L72-L76 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L72-L76 type PagerDutyEventType struct { Name string } diff --git a/typedapi/types/enums/phoneticencoder/phoneticencoder.go b/typedapi/types/enums/phoneticencoder/phoneticencoder.go index 8f11e72d45..ea3d82cfec 100644 --- a/typedapi/types/enums/phoneticencoder/phoneticencoder.go +++ b/typedapi/types/enums/phoneticencoder/phoneticencoder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package phoneticencoder package phoneticencoder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/phonetic-plugin.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/phonetic-plugin.ts#L23-L36 type PhoneticEncoder struct { Name string } diff --git a/typedapi/types/enums/phoneticlanguage/phoneticlanguage.go b/typedapi/types/enums/phoneticlanguage/phoneticlanguage.go index 9a6fc842df..60efbaa9e9 100644 --- a/typedapi/types/enums/phoneticlanguage/phoneticlanguage.go +++ b/typedapi/types/enums/phoneticlanguage/phoneticlanguage.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package phoneticlanguage package phoneticlanguage import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/phonetic-plugin.ts#L38-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/phonetic-plugin.ts#L38-L51 type PhoneticLanguage struct { Name string } diff --git a/typedapi/types/enums/phoneticnametype/phoneticnametype.go b/typedapi/types/enums/phoneticnametype/phoneticnametype.go index 00fe1d779a..d95d5b0ee3 100644 --- a/typedapi/types/enums/phoneticnametype/phoneticnametype.go +++ b/typedapi/types/enums/phoneticnametype/phoneticnametype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package phoneticnametype package phoneticnametype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/phonetic-plugin.ts#L53-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/phonetic-plugin.ts#L53-L57 type PhoneticNameType struct { Name string } diff --git a/typedapi/types/enums/phoneticruletype/phoneticruletype.go b/typedapi/types/enums/phoneticruletype/phoneticruletype.go index aef6edd6aa..7cf971b88d 100644 --- a/typedapi/types/enums/phoneticruletype/phoneticruletype.go +++ b/typedapi/types/enums/phoneticruletype/phoneticruletype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package phoneticruletype package phoneticruletype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/phonetic-plugin.ts#L59-L62 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/phonetic-plugin.ts#L59-L62 type PhoneticRuleType struct { Name string } diff --git a/typedapi/types/enums/policytype/policytype.go b/typedapi/types/enums/policytype/policytype.go index c2f81e2f1a..e3f9fe010b 100644 --- a/typedapi/types/enums/policytype/policytype.go +++ b/typedapi/types/enums/policytype/policytype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package policytype package policytype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/_types/Policy.ts#L28-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/_types/Policy.ts#L28-L32 type PolicyType struct { Name string } diff --git a/typedapi/types/enums/quantifier/quantifier.go b/typedapi/types/enums/quantifier/quantifier.go index 32dfa873ce..27a1d0e71c 100644 --- a/typedapi/types/enums/quantifier/quantifier.go +++ b/typedapi/types/enums/quantifier/quantifier.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package quantifier package quantifier import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Conditions.ts#L71-L74 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Conditions.ts#L74-L77 type Quantifier struct { Name string } diff --git a/typedapi/types/enums/queryrulecriteriatype/queryrulecriteriatype.go b/typedapi/types/enums/queryrulecriteriatype/queryrulecriteriatype.go index 3404dd993c..db21a3c3b4 100644 --- a/typedapi/types/enums/queryrulecriteriatype/queryrulecriteriatype.go +++ b/typedapi/types/enums/queryrulecriteriatype/queryrulecriteriatype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package queryrulecriteriatype package queryrulecriteriatype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/query_ruleset/_types/QueryRuleset.ts#L54-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/_types/QueryRuleset.ts#L54-L66 type QueryRuleCriteriaType struct { Name string } @@ -48,6 +48,8 @@ var ( Gt = QueryRuleCriteriaType{"gt"} Gte = QueryRuleCriteriaType{"gte"} + + Always = QueryRuleCriteriaType{"always"} ) func (q QueryRuleCriteriaType) MarshalText() (text []byte, err error) { @@ -77,6 +79,8 @@ func (q *QueryRuleCriteriaType) UnmarshalText(text []byte) error { *q = Gt case "gte": *q = Gte + case "always": + *q = Always default: *q = QueryRuleCriteriaType{string(text)} } diff --git a/typedapi/types/enums/queryruletype/queryruletype.go b/typedapi/types/enums/queryruletype/queryruletype.go index 05c2ad1113..7f1bf27f72 100644 --- a/typedapi/types/enums/queryruletype/queryruletype.go +++ b/typedapi/types/enums/queryruletype/queryruletype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package queryruletype package queryruletype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/query_ruleset/_types/QueryRuleset.ts#L44-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/_types/QueryRuleset.ts#L44-L46 type QueryRuleType struct { Name string } diff --git a/typedapi/types/enums/rangerelation/rangerelation.go b/typedapi/types/enums/rangerelation/rangerelation.go index 2b5e0c3ee3..f2cfd8449b 100644 --- a/typedapi/types/enums/rangerelation/rangerelation.go +++ b/typedapi/types/enums/rangerelation/rangerelation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package rangerelation package rangerelation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L170-L183 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L172-L185 type RangeRelation struct { Name string } diff --git a/typedapi/types/enums/ratemode/ratemode.go b/typedapi/types/enums/ratemode/ratemode.go index 03d89373d3..c46a0fb822 100644 --- a/typedapi/types/enums/ratemode/ratemode.go +++ b/typedapi/types/enums/ratemode/ratemode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
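The query_rules hunk above adds an `always` criteria type alongside the existing comparison operators. Below is a minimal sketch of how the new constant round-trips through the generated open-enum wrapper; the import path assumes the `github.com/elastic/go-elasticsearch/v8` module layout for the typed API, and only identifiers visible in the hunk are used.

```go
package main

import (
	"fmt"

	// Assumed import path; the package lives under typedapi/types/enums in go-elasticsearch v8.
	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/queryrulecriteriatype"
)

func main() {
	// The new "always" criteria type now decodes to a named constant.
	var c queryrulecriteriatype.QueryRuleCriteriaType
	_ = c.UnmarshalText([]byte("always"))
	fmt.Println(c == queryrulecriteriatype.Always) // true

	// Values outside the enum fall through to the default case and are kept
	// as free-form strings, so newer server-side additions do not break decoding.
	_ = c.UnmarshalText([]byte("not-yet-known"))
	fmt.Println(c.Name) // "not-yet-known"
}
```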
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package ratemode package ratemode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L243-L252 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L243-L252 type RateMode struct { Name string } diff --git a/typedapi/types/enums/refresh/refresh.go b/typedapi/types/enums/refresh/refresh.go index b8ef5ae47f..57ef2d10ef 100644 --- a/typedapi/types/enums/refresh/refresh.go +++ b/typedapi/types/enums/refresh/refresh.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package refresh package refresh import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L266-L273 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L266-L273 type Refresh struct { Name string } diff --git a/typedapi/types/enums/responsecontenttype/responsecontenttype.go b/typedapi/types/enums/responsecontenttype/responsecontenttype.go index 4395dab506..6558c40652 100644 --- a/typedapi/types/enums/responsecontenttype/responsecontenttype.go +++ b/typedapi/types/enums/responsecontenttype/responsecontenttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package responsecontenttype package responsecontenttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L106-L110 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L106-L110 type ResponseContentType struct { Name string } diff --git a/typedapi/types/enums/result/result.go b/typedapi/types/enums/result/result.go index 8f2703c0b2..d45389d38b 100644 --- a/typedapi/types/enums/result/result.go +++ b/typedapi/types/enums/result/result.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package result package result import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Result.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Result.ts#L20-L26 type Result struct { Name string } diff --git a/typedapi/types/enums/resultposition/resultposition.go b/typedapi/types/enums/resultposition/resultposition.go index fdba5c9224..d9530c61b4 100644 --- a/typedapi/types/enums/resultposition/resultposition.go +++ b/typedapi/types/enums/resultposition/resultposition.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package resultposition package resultposition import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/eql/search/types.ts#L20-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/eql/search/types.ts#L20-L32 type ResultPosition struct { Name string } diff --git a/typedapi/types/enums/routingstate/routingstate.go b/typedapi/types/enums/routingstate/routingstate.go index 37c197bc03..bd6632470d 100644 --- a/typedapi/types/enums/routingstate/routingstate.go +++ b/typedapi/types/enums/routingstate/routingstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package routingstate package routingstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L351-L372 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L351-L372 type RoutingState struct { Name string } diff --git a/typedapi/types/enums/ruleaction/ruleaction.go b/typedapi/types/enums/ruleaction/ruleaction.go index f4e23a5783..f1e5a01334 100644 --- a/typedapi/types/enums/ruleaction/ruleaction.go +++ b/typedapi/types/enums/ruleaction/ruleaction.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package ruleaction package ruleaction import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Rule.ts#L41-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Rule.ts#L41-L50 type RuleAction struct { Name string } diff --git a/typedapi/types/enums/runtimefieldtype/runtimefieldtype.go b/typedapi/types/enums/runtimefieldtype/runtimefieldtype.go index f736ec157b..b5f200ed55 100644 --- a/typedapi/types/enums/runtimefieldtype/runtimefieldtype.go +++ b/typedapi/types/enums/runtimefieldtype/runtimefieldtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package runtimefieldtype package runtimefieldtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/RuntimeFields.ts#L56-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/RuntimeFields.ts#L56-L66 type RuntimeFieldType struct { Name string } @@ -31,6 +31,8 @@ type RuntimeFieldType struct { var ( Boolean = RuntimeFieldType{"boolean"} + Composite = RuntimeFieldType{"composite"} + Date = RuntimeFieldType{"date"} Double = RuntimeFieldType{"double"} @@ -55,6 +57,8 @@ func (r *RuntimeFieldType) UnmarshalText(text []byte) error { case "boolean": *r = Boolean + case "composite": + *r = Composite case "date": *r = Date case "double": diff --git a/typedapi/types/enums/sampleraggregationexecutionhint/sampleraggregationexecutionhint.go b/typedapi/types/enums/sampleraggregationexecutionhint/sampleraggregationexecutionhint.go index d33b6ef54a..d221693e0a 100644 --- a/typedapi/types/enums/sampleraggregationexecutionhint/sampleraggregationexecutionhint.go +++ b/typedapi/types/enums/sampleraggregationexecutionhint/sampleraggregationexecutionhint.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package sampleraggregationexecutionhint package sampleraggregationexecutionhint import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L343-L356 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L345-L358 type SamplerAggregationExecutionHint struct { Name string } diff --git a/typedapi/types/enums/scoremode/scoremode.go b/typedapi/types/enums/scoremode/scoremode.go index b65b95981d..57645c98e7 100644 --- a/typedapi/types/enums/scoremode/scoremode.go +++ b/typedapi/types/enums/scoremode/scoremode.go @@ -16,14 +16,14 @@ // under the License. 
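For reference, the RuntimeFieldType hunk above introduces a `composite` member. A small sketch of decoding it via the generated UnmarshalText, again assuming the v8 module path; the `Composite` constant name comes straight from the diff.

```go
package main

import (
	"fmt"

	// Assumed import path for the generated enum package.
	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/runtimefieldtype"
)

func main() {
	// "composite" now decodes to the dedicated constant instead of a raw string.
	var r runtimefieldtype.RuntimeFieldType
	_ = r.UnmarshalText([]byte("composite"))
	fmt.Println(r == runtimefieldtype.Composite) // true
	fmt.Println(r.Name)                          // "composite"
}
```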
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package scoremode package scoremode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/rescoring.ts#L52-L74 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/rescoring.ts#L64-L86 type ScoreMode struct { Name string } diff --git a/typedapi/types/enums/scriptlanguage/scriptlanguage.go b/typedapi/types/enums/scriptlanguage/scriptlanguage.go index 97bfa8ef9e..04eb500950 100644 --- a/typedapi/types/enums/scriptlanguage/scriptlanguage.go +++ b/typedapi/types/enums/scriptlanguage/scriptlanguage.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package scriptlanguage package scriptlanguage import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Scripting.ts#L24-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Scripting.ts#L24-L45 type ScriptLanguage struct { Name string } diff --git a/typedapi/types/enums/scriptsorttype/scriptsorttype.go b/typedapi/types/enums/scriptsorttype/scriptsorttype.go index 675dae9732..2a4f1aef2c 100644 --- a/typedapi/types/enums/scriptsorttype/scriptsorttype.go +++ b/typedapi/types/enums/scriptsorttype/scriptsorttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package scriptsorttype package scriptsorttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/sort.ts#L76-L80 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/sort.ts#L80-L84 type ScriptSortType struct { Name string } diff --git a/typedapi/types/enums/searchtype/searchtype.go b/typedapi/types/enums/searchtype/searchtype.go index 51c26b5c36..2c63b6d2ee 100644 --- a/typedapi/types/enums/searchtype/searchtype.go +++ b/typedapi/types/enums/searchtype/searchtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package searchtype package searchtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L275-L280 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L275-L280 type SearchType struct { Name string } diff --git a/typedapi/types/enums/segmentsortmissing/segmentsortmissing.go b/typedapi/types/enums/segmentsortmissing/segmentsortmissing.go index 2f7686e46d..854ae768ce 100644 --- a/typedapi/types/enums/segmentsortmissing/segmentsortmissing.go +++ b/typedapi/types/enums/segmentsortmissing/segmentsortmissing.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package segmentsortmissing package segmentsortmissing import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSegmentSort.ts#L43-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSegmentSort.ts#L43-L46 type SegmentSortMissing struct { Name string } diff --git a/typedapi/types/enums/segmentsortmode/segmentsortmode.go b/typedapi/types/enums/segmentsortmode/segmentsortmode.go index fba5ea2120..c7e6acaa0d 100644 --- a/typedapi/types/enums/segmentsortmode/segmentsortmode.go +++ b/typedapi/types/enums/segmentsortmode/segmentsortmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package segmentsortmode package segmentsortmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSegmentSort.ts#L36-L41 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSegmentSort.ts#L36-L41 type SegmentSortMode struct { Name string } diff --git a/typedapi/types/enums/segmentsortorder/segmentsortorder.go b/typedapi/types/enums/segmentsortorder/segmentsortorder.go index 0f9215dee9..840b9912a0 100644 --- a/typedapi/types/enums/segmentsortorder/segmentsortorder.go +++ b/typedapi/types/enums/segmentsortorder/segmentsortorder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package segmentsortorder package segmentsortorder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSegmentSort.ts#L29-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSegmentSort.ts#L29-L34 type SegmentSortOrder struct { Name string } diff --git a/typedapi/types/enums/shapetype/shapetype.go b/typedapi/types/enums/shapetype/shapetype.go index 0c9f6bff73..5398625083 100644 --- a/typedapi/types/enums/shapetype/shapetype.go +++ b/typedapi/types/enums/shapetype/shapetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package shapetype package shapetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L1070-L1073 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L1074-L1077 type ShapeType struct { Name string } diff --git a/typedapi/types/enums/shardroutingstate/shardroutingstate.go b/typedapi/types/enums/shardroutingstate/shardroutingstate.go index 6808d43b57..84878c9654 100644 --- a/typedapi/types/enums/shardroutingstate/shardroutingstate.go +++ b/typedapi/types/enums/shardroutingstate/shardroutingstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package shardroutingstate package shardroutingstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L169-L174 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L169-L174 type ShardRoutingState struct { Name string } diff --git a/typedapi/types/enums/shardsstatsstage/shardsstatsstage.go b/typedapi/types/enums/shardsstatsstage/shardsstatsstage.go index b4c00e4cc5..392c580b37 100644 --- a/typedapi/types/enums/shardsstatsstage/shardsstatsstage.go +++ b/typedapi/types/enums/shardsstatsstage/shardsstatsstage.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package shardsstatsstage package shardsstatsstage import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotShardsStatsStage.ts#L20-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotShardsStatsStage.ts#L20-L31 type ShardsStatsStage struct { Name string } diff --git a/typedapi/types/enums/shardstoreallocation/shardstoreallocation.go b/typedapi/types/enums/shardstoreallocation/shardstoreallocation.go index 197f534236..2252fd9d57 100644 --- a/typedapi/types/enums/shardstoreallocation/shardstoreallocation.go +++ b/typedapi/types/enums/shardstoreallocation/shardstoreallocation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package shardstoreallocation package shardstoreallocation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/shard_stores/types.ts#L45-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/shard_stores/types.ts#L48-L52 type ShardStoreAllocation struct { Name string } diff --git a/typedapi/types/enums/shardstorestatus/shardstorestatus.go b/typedapi/types/enums/shardstorestatus/shardstorestatus.go index cf84fa0be2..9717b27b13 100644 --- a/typedapi/types/enums/shardstorestatus/shardstorestatus.go +++ b/typedapi/types/enums/shardstorestatus/shardstorestatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package shardstorestatus package shardstorestatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/shard_stores/types.ts#L60-L69 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/shard_stores/types.ts#L63-L72 type ShardStoreStatus struct { Name string } diff --git a/typedapi/types/enums/shutdownstatus/shutdownstatus.go b/typedapi/types/enums/shutdownstatus/shutdownstatus.go index 4a84dab879..9e355f0484 100644 --- a/typedapi/types/enums/shutdownstatus/shutdownstatus.go +++ b/typedapi/types/enums/shutdownstatus/shutdownstatus.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package shutdownstatus package shutdownstatus import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L45-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L45-L50 type ShutdownStatus struct { Name string } diff --git a/typedapi/types/enums/shutdowntype/shutdowntype.go b/typedapi/types/enums/shutdowntype/shutdowntype.go index 2ecdb0d302..5341b4542e 100644 --- a/typedapi/types/enums/shutdowntype/shutdowntype.go +++ b/typedapi/types/enums/shutdowntype/shutdowntype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package shutdowntype package shutdowntype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L40-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L40-L43 type ShutdownType struct { Name string } diff --git a/typedapi/types/enums/simplequerystringflag/simplequerystringflag.go b/typedapi/types/enums/simplequerystringflag/simplequerystringflag.go index 25f6252c9c..98e2fb784f 100644 --- a/typedapi/types/enums/simplequerystringflag/simplequerystringflag.go +++ b/typedapi/types/enums/simplequerystringflag/simplequerystringflag.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package simplequerystringflag package simplequerystringflag import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L708-L763 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L708-L763 type SimpleQueryStringFlag struct { Name string } diff --git a/typedapi/types/enums/slicescalculation/slicescalculation.go b/typedapi/types/enums/slicescalculation/slicescalculation.go index 4a2b290a4e..3c765d01d7 100644 --- a/typedapi/types/enums/slicescalculation/slicescalculation.go +++ b/typedapi/types/enums/slicescalculation/slicescalculation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package slicescalculation package slicescalculation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L368-L376 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L371-L379 type SlicesCalculation struct { Name string } diff --git a/typedapi/types/enums/snapshotsort/snapshotsort.go b/typedapi/types/enums/snapshotsort/snapshotsort.go index 837e2d068c..adf4994406 100644 --- a/typedapi/types/enums/snapshotsort/snapshotsort.go +++ b/typedapi/types/enums/snapshotsort/snapshotsort.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package snapshotsort package snapshotsort import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotInfo.ts#L73-L93 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotInfo.ts#L73-L93 type SnapshotSort struct { Name string } diff --git a/typedapi/types/enums/snapshotupgradestate/snapshotupgradestate.go b/typedapi/types/enums/snapshotupgradestate/snapshotupgradestate.go index dc5a23ec89..6415dc107c 100644 --- a/typedapi/types/enums/snapshotupgradestate/snapshotupgradestate.go +++ b/typedapi/types/enums/snapshotupgradestate/snapshotupgradestate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package snapshotupgradestate package snapshotupgradestate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Model.ts#L94-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Model.ts#L94-L99 type SnapshotUpgradeState struct { Name string } diff --git a/typedapi/types/enums/snowballlanguage/snowballlanguage.go b/typedapi/types/enums/snowballlanguage/snowballlanguage.go index 27407c1e14..18effc7620 100644 --- a/typedapi/types/enums/snowballlanguage/snowballlanguage.go +++ b/typedapi/types/enums/snowballlanguage/snowballlanguage.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package snowballlanguage package snowballlanguage import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/languages.ts#L57-L80 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/languages.ts#L57-L80 type SnowballLanguage struct { Name string } diff --git a/typedapi/types/enums/sortmode/sortmode.go b/typedapi/types/enums/sortmode/sortmode.go index 3c852ce152..1a82c84080 100644 --- a/typedapi/types/enums/sortmode/sortmode.go +++ b/typedapi/types/enums/sortmode/sortmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package sortmode package sortmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/sort.ts#L103-L112 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/sort.ts#L108-L117 type SortMode struct { Name string } diff --git a/typedapi/types/enums/sortorder/sortorder.go b/typedapi/types/enums/sortorder/sortorder.go index 87207e08fa..85c3eb5f3f 100644 --- a/typedapi/types/enums/sortorder/sortorder.go +++ b/typedapi/types/enums/sortorder/sortorder.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package sortorder package sortorder import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/sort.ts#L114-L123 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/sort.ts#L119-L128 type SortOrder struct { Name string } diff --git a/typedapi/types/enums/sourcefieldmode/sourcefieldmode.go b/typedapi/types/enums/sourcefieldmode/sourcefieldmode.go index a422206713..c1117a9755 100644 --- a/typedapi/types/enums/sourcefieldmode/sourcefieldmode.go +++ b/typedapi/types/enums/sourcefieldmode/sourcefieldmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package sourcefieldmode package sourcefieldmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/meta-fields.ts#L67-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/meta-fields.ts#L67-L75 type SourceFieldMode struct { Name string } diff --git a/typedapi/types/enums/statslevel/statslevel.go b/typedapi/types/enums/statslevel/statslevel.go index 64a77015b3..e2496a2b82 100644 --- a/typedapi/types/enums/statslevel/statslevel.go +++ b/typedapi/types/enums/statslevel/statslevel.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package statslevel package statslevel import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/searchable_snapshots/_types/stats.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/searchable_snapshots/_types/stats.ts#L20-L24 type StatsLevel struct { Name string } diff --git a/typedapi/types/enums/storagetype/storagetype.go b/typedapi/types/enums/storagetype/storagetype.go index 4ce11fc8d9..66d3ee0ad4 100644 --- a/typedapi/types/enums/storagetype/storagetype.go +++ b/typedapi/types/enums/storagetype/storagetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package storagetype package storagetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L509-L539 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L520-L548 type StorageType struct { Name string } diff --git a/typedapi/types/enums/stringdistance/stringdistance.go b/typedapi/types/enums/stringdistance/stringdistance.go index 0a886b72c3..59c5901cd9 100644 --- a/typedapi/types/enums/stringdistance/stringdistance.go +++ b/typedapi/types/enums/stringdistance/stringdistance.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package stringdistance package stringdistance import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L469-L490 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L472-L493 type StringDistance struct { Name string } diff --git a/typedapi/types/enums/suggestmode/suggestmode.go b/typedapi/types/enums/suggestmode/suggestmode.go index bc3ab01fe3..350789edb2 100644 --- a/typedapi/types/enums/suggestmode/suggestmode.go +++ b/typedapi/types/enums/suggestmode/suggestmode.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package suggestmode package suggestmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L282-L295 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L282-L295 type SuggestMode struct { Name string } diff --git a/typedapi/types/enums/suggestsort/suggestsort.go b/typedapi/types/enums/suggestsort/suggestsort.go index 4b7e27b799..4f9f507896 100644 --- a/typedapi/types/enums/suggestsort/suggestsort.go +++ b/typedapi/types/enums/suggestsort/suggestsort.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package suggestsort package suggestsort import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L492-L501 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L495-L504 type SuggestSort struct { Name string } diff --git a/typedapi/types/enums/synonymformat/synonymformat.go b/typedapi/types/enums/synonymformat/synonymformat.go index 86ea1cddbe..907c9da52a 100644 --- a/typedapi/types/enums/synonymformat/synonymformat.go +++ b/typedapi/types/enums/synonymformat/synonymformat.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package synonymformat package synonymformat import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L105-L108 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L105-L108 type SynonymFormat struct { Name string } diff --git a/typedapi/types/enums/tasktype/tasktype.go b/typedapi/types/enums/tasktype/tasktype.go index cc167475c4..15e92dc81d 100644 --- a/typedapi/types/enums/tasktype/tasktype.go +++ b/typedapi/types/enums/tasktype/tasktype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package tasktype package tasktype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/_types/TaskType.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/_types/TaskType.ts#L20-L28 type TaskType struct { Name string } @@ -32,6 +32,10 @@ var ( Sparseembedding = TaskType{"sparse_embedding"} Textembedding = TaskType{"text_embedding"} + + Rerank = TaskType{"rerank"} + + Completion = TaskType{"completion"} ) func (t TaskType) MarshalText() (text []byte, err error) { @@ -45,6 +49,10 @@ func (t *TaskType) UnmarshalText(text []byte) error { *t = Sparseembedding case "text_embedding": *t = Textembedding + case "rerank": + *t = Rerank + case "completion": + *t = Completion default: *t = TaskType{string(text)} } diff --git a/typedapi/types/enums/templateformat/templateformat.go b/typedapi/types/enums/templateformat/templateformat.go index 3a5a30c08b..9d3483444d 100644 --- a/typedapi/types/enums/templateformat/templateformat.go +++ b/typedapi/types/enums/templateformat/templateformat.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package templateformat package templateformat import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/RoleTemplate.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/RoleTemplate.ts#L22-L25 type TemplateFormat struct { Name string } diff --git a/typedapi/types/enums/termsaggregationcollectmode/termsaggregationcollectmode.go b/typedapi/types/enums/termsaggregationcollectmode/termsaggregationcollectmode.go index b41d5b2c60..57983d1d7c 100644 --- a/typedapi/types/enums/termsaggregationcollectmode/termsaggregationcollectmode.go +++ b/typedapi/types/enums/termsaggregationcollectmode/termsaggregationcollectmode.go @@ -16,14 +16,14 @@ // under the License. 
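The inference TaskType enum above gains `rerank` and `completion`. A sketch of working with the new values, assuming the v8 module path; output uses the exported `Name` field and the UnmarshalText cases shown in the hunk.

```go
package main

import (
	"fmt"

	// Assumed import path for the generated enum package.
	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/tasktype"
)

func main() {
	// The two task types added in this spec bump.
	for _, t := range []tasktype.TaskType{tasktype.Rerank, tasktype.Completion} {
		fmt.Println(t.Name) // "rerank", then "completion"
	}

	// Decoding recognises the new names as well.
	var t tasktype.TaskType
	_ = t.UnmarshalText([]byte("completion"))
	fmt.Println(t == tasktype.Completion) // true
}
```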
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package termsaggregationcollectmode package termsaggregationcollectmode import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L980-L989 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L987-L996 type TermsAggregationCollectMode struct { Name string } diff --git a/typedapi/types/enums/termsaggregationexecutionhint/termsaggregationexecutionhint.go b/typedapi/types/enums/termsaggregationexecutionhint/termsaggregationexecutionhint.go index f3303e7111..2cb0fda665 100644 --- a/typedapi/types/enums/termsaggregationexecutionhint/termsaggregationexecutionhint.go +++ b/typedapi/types/enums/termsaggregationexecutionhint/termsaggregationexecutionhint.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package termsaggregationexecutionhint package termsaggregationexecutionhint import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L991-L996 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L998-L1003 type TermsAggregationExecutionHint struct { Name string } diff --git a/typedapi/types/enums/termvectoroption/termvectoroption.go b/typedapi/types/enums/termvectoroption/termvectoroption.go index f407a44933..c931b4f49f 100644 --- a/typedapi/types/enums/termvectoroption/termvectoroption.go +++ b/typedapi/types/enums/termvectoroption/termvectoroption.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package termvectoroption package termvectoroption import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/TermVectorOption.ts#L20-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/TermVectorOption.ts#L20-L28 type TermVectorOption struct { Name string } diff --git a/typedapi/types/enums/textquerytype/textquerytype.go b/typedapi/types/enums/textquerytype/textquerytype.go index 0dd698eaaf..b0d513d2be 100644 --- a/typedapi/types/enums/textquerytype/textquerytype.go +++ b/typedapi/types/enums/textquerytype/textquerytype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package textquerytype package textquerytype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L541-L567 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L541-L567 type TextQueryType struct { Name string } diff --git a/typedapi/types/enums/threadtype/threadtype.go b/typedapi/types/enums/threadtype/threadtype.go index 1a69fdf503..a4d3ec2849 100644 --- a/typedapi/types/enums/threadtype/threadtype.go +++ b/typedapi/types/enums/threadtype/threadtype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package threadtype package threadtype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L297-L303 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L297-L303 type ThreadType struct { Name string } diff --git a/typedapi/types/enums/timeseriesmetrictype/timeseriesmetrictype.go b/typedapi/types/enums/timeseriesmetrictype/timeseriesmetrictype.go index 30a5b0ee13..f1803f6296 100644 --- a/typedapi/types/enums/timeseriesmetrictype/timeseriesmetrictype.go +++ b/typedapi/types/enums/timeseriesmetrictype/timeseriesmetrictype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package timeseriesmetrictype package timeseriesmetrictype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/TimeSeriesMetricType.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/TimeSeriesMetricType.ts#L20-L26 type TimeSeriesMetricType struct { Name string } diff --git a/typedapi/types/enums/timeunit/timeunit.go b/typedapi/types/enums/timeunit/timeunit.go index af85eef45c..cf270c6854 100644 --- a/typedapi/types/enums/timeunit/timeunit.go +++ b/typedapi/types/enums/timeunit/timeunit.go @@ -16,32 +16,32 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package timeunit package timeunit import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Time.ts#L69-L77 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Time.ts#L69-L77 type TimeUnit struct { Name string } var ( - Nanos = TimeUnit{"nanos"} + Nanoseconds = TimeUnit{"nanos"} - Micros = TimeUnit{"micros"} + Microseconds = TimeUnit{"micros"} - Ms = TimeUnit{"ms"} + Milliseconds = TimeUnit{"ms"} - S = TimeUnit{"s"} + Seconds = TimeUnit{"s"} - M = TimeUnit{"m"} + Minutes = TimeUnit{"m"} - H = TimeUnit{"h"} + Hours = TimeUnit{"h"} - D = TimeUnit{"d"} + Days = TimeUnit{"d"} ) func (t TimeUnit) MarshalText() (text []byte, err error) { @@ -52,19 +52,19 @@ func (t *TimeUnit) UnmarshalText(text []byte) error { switch strings.ReplaceAll(strings.ToLower(string(text)), "\"", "") { case "nanos": - *t = Nanos + *t = Nanoseconds case "micros": - *t = Micros + *t = Microseconds case "ms": - *t = Ms + *t = Milliseconds case "s": - *t = S + *t = Seconds case "m": - *t = M + *t = Minutes case "h": - *t = H + *t = Hours case "d": - *t = D + *t = Days default: *t = TimeUnit{string(text)} } diff --git a/typedapi/types/enums/tokenchar/tokenchar.go b/typedapi/types/enums/tokenchar/tokenchar.go index 2eb1b6a1f2..7fdcd8ea13 100644 --- a/typedapi/types/enums/tokenchar/tokenchar.go +++ b/typedapi/types/enums/tokenchar/tokenchar.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package tokenchar package tokenchar import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L47-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L47-L54 type TokenChar struct { Name string } diff --git a/typedapi/types/enums/tokenizationtruncate/tokenizationtruncate.go b/typedapi/types/enums/tokenizationtruncate/tokenizationtruncate.go index 3f6a56fe66..0ac413a7c2 100644 --- a/typedapi/types/enums/tokenizationtruncate/tokenizationtruncate.go +++ b/typedapi/types/enums/tokenizationtruncate/tokenizationtruncate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
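The TimeUnit hunk above renames the exported constants (Nanos to Nanoseconds, Ms to Milliseconds, S to Seconds, and so on) while keeping the serialized wire values unchanged, so this is a compile-time break only for callers that referenced the old identifiers. A sketch under the assumed v8 module path:

```go
package main

import (
	"fmt"

	// Assumed import path for the generated enum package.
	"github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/timeunit"
)

func main() {
	// Descriptive constant names, same serialized values as before.
	fmt.Println(timeunit.Seconds.Name) // "s"
	fmt.Println(timeunit.Minutes.Name) // "m"
	fmt.Println(timeunit.Days.Name)    // "d"

	// Decoding the short wire form still yields the named constant.
	var u timeunit.TimeUnit
	_ = u.UnmarshalText([]byte("h"))
	fmt.Println(u == timeunit.Hours) // true
}
```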
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package tokenizationtruncate package tokenizationtruncate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L350-L354 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L350-L354 type TokenizationTruncate struct { Name string } diff --git a/typedapi/types/enums/totalhitsrelation/totalhitsrelation.go b/typedapi/types/enums/totalhitsrelation/totalhitsrelation.go index c2f2062c2a..92f6b04bc9 100644 --- a/typedapi/types/enums/totalhitsrelation/totalhitsrelation.go +++ b/typedapi/types/enums/totalhitsrelation/totalhitsrelation.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package totalhitsrelation package totalhitsrelation import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/hits.ts#L99-L104 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/hits.ts#L99-L104 type TotalHitsRelation struct { Name string } diff --git a/typedapi/types/enums/trainedmodeltype/trainedmodeltype.go b/typedapi/types/enums/trainedmodeltype/trainedmodeltype.go index ac401e5293..c8501ec615 100644 --- a/typedapi/types/enums/trainedmodeltype/trainedmodeltype.go +++ b/typedapi/types/enums/trainedmodeltype/trainedmodeltype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package trainedmodeltype package trainedmodeltype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L258-L272 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L258-L272 type TrainedModelType struct { Name string } diff --git a/typedapi/types/enums/trainingpriority/trainingpriority.go b/typedapi/types/enums/trainingpriority/trainingpriority.go index 39ae7c4c68..4f5496da1a 100644 --- a/typedapi/types/enums/trainingpriority/trainingpriority.go +++ b/typedapi/types/enums/trainingpriority/trainingpriority.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package trainingpriority package trainingpriority import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L311-L314 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L311-L314 type TrainingPriority struct { Name string } diff --git a/typedapi/types/enums/translogdurability/translogdurability.go b/typedapi/types/enums/translogdurability/translogdurability.go index 33d1b14315..6d9a1765c4 100644 --- a/typedapi/types/enums/translogdurability/translogdurability.go +++ b/typedapi/types/enums/translogdurability/translogdurability.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package translogdurability package translogdurability import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L363-L378 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L365-L380 type TranslogDurability struct { Name string } diff --git a/typedapi/types/enums/ttesttype/ttesttype.go b/typedapi/types/enums/ttesttype/ttesttype.go index 0f5cefdf15..394a1d251f 100644 --- a/typedapi/types/enums/ttesttype/ttesttype.go +++ b/typedapi/types/enums/ttesttype/ttesttype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package ttesttype package ttesttype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L322-L335 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L322-L335 type TTestType struct { Name string } diff --git a/typedapi/types/enums/type_/type_.go b/typedapi/types/enums/type_/type_.go index 46d4730fd2..08fa0d4aa5 100644 --- a/typedapi/types/enums/type_/type_.go +++ b/typedapi/types/enums/type_/type_.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package type_ package type_ import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/shutdown/_types/types.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/shutdown/_types/types.ts#L20-L24 type Type struct { Name string } diff --git a/typedapi/types/enums/unassignedinformationreason/unassignedinformationreason.go b/typedapi/types/enums/unassignedinformationreason/unassignedinformationreason.go index 435a363812..314b148a60 100644 --- a/typedapi/types/enums/unassignedinformationreason/unassignedinformationreason.go +++ b/typedapi/types/enums/unassignedinformationreason/unassignedinformationreason.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package unassignedinformationreason package unassignedinformationreason import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/types.ts#L127-L146 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/types.ts#L127-L146 type UnassignedInformationReason struct { Name string } diff --git a/typedapi/types/enums/useragentproperty/useragentproperty.go b/typedapi/types/enums/useragentproperty/useragentproperty.go index aa2d21f0a5..fce9e19524 100644 --- a/typedapi/types/enums/useragentproperty/useragentproperty.go +++ b/typedapi/types/enums/useragentproperty/useragentproperty.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package useragentproperty package useragentproperty import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L266-L277 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L266-L277 type UserAgentProperty struct { Name string } diff --git a/typedapi/types/enums/valuetype/valuetype.go b/typedapi/types/enums/valuetype/valuetype.go index cacc0202e5..333619471b 100644 --- a/typedapi/types/enums/valuetype/valuetype.go +++ b/typedapi/types/enums/valuetype/valuetype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package valuetype package valuetype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L419-L430 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L424-L435 type ValueType struct { Name string } diff --git a/typedapi/types/enums/versiontype/versiontype.go b/typedapi/types/enums/versiontype/versiontype.go index fc1dbd6edd..9d16a5a8e4 100644 --- a/typedapi/types/enums/versiontype/versiontype.go +++ b/typedapi/types/enums/versiontype/versiontype.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package versiontype package versiontype import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L106-L122 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L106-L122 type VersionType struct { Name string } diff --git a/typedapi/types/enums/waitforactiveshardoptions/waitforactiveshardoptions.go b/typedapi/types/enums/waitforactiveshardoptions/waitforactiveshardoptions.go index 988778266d..45bb2e04ee 100644 --- a/typedapi/types/enums/waitforactiveshardoptions/waitforactiveshardoptions.go +++ b/typedapi/types/enums/waitforactiveshardoptions/waitforactiveshardoptions.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package waitforactiveshardoptions package waitforactiveshardoptions import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L305-L309 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L305-L309 type WaitForActiveShardOptions struct { Name string } diff --git a/typedapi/types/enums/waitforevents/waitforevents.go b/typedapi/types/enums/waitforevents/waitforevents.go index 7d53fcca55..3c2cccb42a 100644 --- a/typedapi/types/enums/waitforevents/waitforevents.go +++ b/typedapi/types/enums/waitforevents/waitforevents.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package waitforevents package waitforevents import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L311-L318 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L311-L318 type WaitForEvents struct { Name string } diff --git a/typedapi/types/enums/watchermetric/watchermetric.go b/typedapi/types/enums/watchermetric/watchermetric.go index 8c485d6d4e..3c63a9c59a 100644 --- a/typedapi/types/enums/watchermetric/watchermetric.go +++ b/typedapi/types/enums/watchermetric/watchermetric.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package watchermetric package watchermetric import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/stats/types.ts#L42-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/stats/types.ts#L42-L48 type WatcherMetric struct { Name string } diff --git a/typedapi/types/enums/watcherstate/watcherstate.go b/typedapi/types/enums/watcherstate/watcherstate.go index 40eb9dfcc4..2f77154445 100644 --- a/typedapi/types/enums/watcherstate/watcherstate.go +++ b/typedapi/types/enums/watcherstate/watcherstate.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package watcherstate package watcherstate import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/stats/types.ts#L26-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/stats/types.ts#L26-L31 type WatcherState struct { Name string } diff --git a/typedapi/types/enums/zerotermsquery/zerotermsquery.go b/typedapi/types/enums/zerotermsquery/zerotermsquery.go index 10cd61ce46..1ca4b8693c 100644 --- a/typedapi/types/enums/zerotermsquery/zerotermsquery.go +++ b/typedapi/types/enums/zerotermsquery/zerotermsquery.go @@ -16,14 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Package zerotermsquery package zerotermsquery import "strings" -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L569-L578 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L569-L578 type ZeroTermsQuery struct { Name string } diff --git a/typedapi/types/epochtimeunitmillis.go b/typedapi/types/epochtimeunitmillis.go index f905bc34f6..5b383993cb 100644 --- a/typedapi/types/epochtimeunitmillis.go +++ b/typedapi/types/epochtimeunitmillis.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // EpochTimeUnitMillis type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Time.ts#L40-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Time.ts#L40-L40 type EpochTimeUnitMillis int64 diff --git a/typedapi/types/epochtimeunitseconds.go b/typedapi/types/epochtimeunitseconds.go index c9a3e88221..eb94309c7e 100644 --- a/typedapi/types/epochtimeunitseconds.go +++ b/typedapi/types/epochtimeunitseconds.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // EpochTimeUnitSeconds type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Time.ts#L40-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Time.ts#L40-L40 type EpochTimeUnitSeconds int64 diff --git a/typedapi/types/eql.go b/typedapi/types/eql.go index 76681d1838..6103b19b91 100644 --- a/typedapi/types/eql.go +++ b/typedapi/types/eql.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Eql type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L351-L354 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L351-L354 type Eql struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -55,7 +55,7 @@ func (s *Eql) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -69,7 +69,7 @@ func (s *Eql) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/eqlfeatures.go b/typedapi/types/eqlfeatures.go index 9b5c6ec598..0786b532bf 100644 --- a/typedapi/types/eqlfeatures.go +++ b/typedapi/types/eqlfeatures.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // EqlFeatures type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L99-L107 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L99-L107 type EqlFeatures struct { Event uint `json:"event"` Join uint `json:"join"` diff --git a/typedapi/types/eqlfeaturesjoin.go b/typedapi/types/eqlfeaturesjoin.go index a2c18985e8..cad440ad57 100644 --- a/typedapi/types/eqlfeaturesjoin.go +++ b/typedapi/types/eqlfeaturesjoin.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // EqlFeaturesJoin type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L109-L115 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L109-L115 type EqlFeaturesJoin struct { JoinQueriesFiveOrMore uint `json:"join_queries_five_or_more"` JoinQueriesFour uint `json:"join_queries_four"` diff --git a/typedapi/types/eqlfeatureskeys.go b/typedapi/types/eqlfeatureskeys.go index 83ec5ea5d9..e07e837ad3 100644 --- a/typedapi/types/eqlfeatureskeys.go +++ b/typedapi/types/eqlfeatureskeys.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // EqlFeaturesKeys type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L117-L123 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L117-L123 type EqlFeaturesKeys struct { JoinKeysFiveOrMore uint `json:"join_keys_five_or_more"` JoinKeysFour uint `json:"join_keys_four"` diff --git a/typedapi/types/eqlfeaturespipes.go b/typedapi/types/eqlfeaturespipes.go index 4f4185ccdf..d7373a43b5 100644 --- a/typedapi/types/eqlfeaturespipes.go +++ b/typedapi/types/eqlfeaturespipes.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // EqlFeaturesPipes type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L125-L128 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L125-L128 type EqlFeaturesPipes struct { PipeHead uint `json:"pipe_head"` PipeTail uint `json:"pipe_tail"` diff --git a/typedapi/types/eqlfeaturessequences.go b/typedapi/types/eqlfeaturessequences.go index 0e16867693..df92bbce50 100644 --- a/typedapi/types/eqlfeaturessequences.go +++ b/typedapi/types/eqlfeaturessequences.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // EqlFeaturesSequences type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L130-L137 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L130-L137 type EqlFeaturesSequences struct { SequenceMaxspan uint `json:"sequence_maxspan"` SequenceQueriesFiveOrMore uint `json:"sequence_queries_five_or_more"` diff --git a/typedapi/types/eqlhits.go b/typedapi/types/eqlhits.go index 5ea1879a29..fe80f4a02d 100644 --- a/typedapi/types/eqlhits.go +++ b/typedapi/types/eqlhits.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // EqlHits type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/eql/_types/EqlHits.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/eql/_types/EqlHits.ts#L25-L39 type EqlHits struct { // Events Contains events matching the query. Each object represents a matching event. 
Events []HitsEvent `json:"events,omitempty"` diff --git a/typedapi/types/errorcause.go b/typedapi/types/errorcause.go index 6c71e532a1..2306c8a653 100644 --- a/typedapi/types/errorcause.go +++ b/typedapi/types/errorcause.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ErrorCause type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Errors.ts#L25-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Errors.ts#L25-L50 type ErrorCause struct { CausedBy *ErrorCause `json:"caused_by,omitempty"` Metadata map[string]json.RawMessage `json:"-"` @@ -140,7 +140,7 @@ func (s *ErrorCause) UnmarshalJSON(data []byte) error { func (s ErrorCause) MarshalJSON() ([]byte, error) { type opt ErrorCause // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/errorresponsebase.go b/typedapi/types/errorresponsebase.go index bcd2970fad..4511bd9f8a 100644 --- a/typedapi/types/errorresponsebase.go +++ b/typedapi/types/errorresponsebase.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ErrorResponseBase type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Base.ts#L76-L85 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Base.ts#L76-L85 type ErrorResponseBase struct { Error ErrorCause `json:"error"` Status int `json:"status"` @@ -59,7 +59,7 @@ func (s *ErrorResponseBase) UnmarshalJSON(data []byte) error { case "status": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/esqlcolumns.go b/typedapi/types/esqlcolumns.go index 7d3892da05..f45b767c53 100644 --- a/typedapi/types/esqlcolumns.go +++ b/typedapi/types/esqlcolumns.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // EsqlColumns type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Binary.ts#L24-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Binary.ts#L24-L24 type EsqlColumns []byte diff --git a/typedapi/types/eventdatastream.go b/typedapi/types/eventdatastream.go index de2b52260f..64e73313a7 100644 --- a/typedapi/types/eventdatastream.go +++ b/typedapi/types/eventdatastream.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // EventDataStream type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/_types/BehavioralAnalytics.ts#L29-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/_types/BehavioralAnalytics.ts#L29-L31 type EventDataStream struct { Name string `json:"name"` } diff --git a/typedapi/types/ewmamodelsettings.go b/typedapi/types/ewmamodelsettings.go index be6166b6dc..672c163300 100644 --- a/typedapi/types/ewmamodelsettings.go +++ b/typedapi/types/ewmamodelsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // EwmaModelSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L267-L269 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L267-L269 type EwmaModelSettings struct { Alpha *float32 `json:"alpha,omitempty"` } @@ -52,7 +52,7 @@ func (s *EwmaModelSettings) UnmarshalJSON(data []byte) error { switch t { case "alpha": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/ewmamovingaverageaggregation.go b/typedapi/types/ewmamovingaverageaggregation.go index 03b626f3e1..0c3741bb01 100644 --- a/typedapi/types/ewmamovingaverageaggregation.go +++ b/typedapi/types/ewmamovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // EwmaMovingAverageAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L252-L255 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L252-L255 type EwmaMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,10 +43,8 @@ type EwmaMovingAverageAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` Minimize *bool `json:"minimize,omitempty"` Model string `json:"model,omitempty"` - Name *string `json:"name,omitempty"` Predict *int `json:"predict,omitempty"` Settings EwmaModelSettings `json:"settings"` Window *int `json:"window,omitempty"` @@ -89,13 +87,8 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "minimize": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,21 +106,9 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Model", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "predict": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -148,7 +129,7 @@ func (s *EwmaMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "window": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -174,10 +155,8 @@ func (s EwmaMovingAverageAggregation) MarshalJSON() ([]byte, error) { BucketsPath: s.BucketsPath, Format: s.Format, GapPolicy: s.GapPolicy, - Meta: s.Meta, Minimize: s.Minimize, Model: s.Model, - Name: s.Name, Predict: s.Predict, Settings: s.Settings, Window: s.Window, diff --git a/typedapi/types/executeenrichpolicystatus.go b/typedapi/types/executeenrichpolicystatus.go index f9dd0b93d0..c896ee58d5 100644 --- a/typedapi/types/executeenrichpolicystatus.go +++ b/typedapi/types/executeenrichpolicystatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // ExecuteEnrichPolicyStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/execute_policy/types.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/execute_policy/types.ts#L20-L22 type ExecuteEnrichPolicyStatus struct { Phase enrichpolicyphase.EnrichPolicyPhase `json:"phase"` } diff --git a/typedapi/types/executingpolicy.go b/typedapi/types/executingpolicy.go index b9db0f643c..14aa7947e9 100644 --- a/typedapi/types/executingpolicy.go +++ b/typedapi/types/executingpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ExecutingPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/stats/types.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/stats/types.ts#L24-L27 type ExecutingPolicy struct { Name string `json:"name"` Task TaskInfo `json:"task"` diff --git a/typedapi/types/executionresult.go b/typedapi/types/executionresult.go index 849af10517..74222fdcda 100644 --- a/typedapi/types/executionresult.go +++ b/typedapi/types/executionresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ExecutionResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Execution.ts#L60-L66 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Execution.ts#L60-L66 type ExecutionResult struct { Actions []ExecutionResultAction `json:"actions"` Condition ExecutionResultCondition `json:"condition"` diff --git a/typedapi/types/executionresultaction.go b/typedapi/types/executionresultaction.go index fdc43107f5..7549bd413b 100644 --- a/typedapi/types/executionresultaction.go +++ b/typedapi/types/executionresultaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // ExecutionResultAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Execution.ts#L74-L86 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Execution.ts#L74-L86 type ExecutionResultAction struct { Email *EmailResult `json:"email,omitempty"` Error *ErrorCause `json:"error,omitempty"` diff --git a/typedapi/types/executionresultcondition.go b/typedapi/types/executionresultcondition.go index 27e217b00a..c9661cc6cf 100644 --- a/typedapi/types/executionresultcondition.go +++ b/typedapi/types/executionresultcondition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // ExecutionResultCondition type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Execution.ts#L68-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Execution.ts#L68-L72 type ExecutionResultCondition struct { Met bool `json:"met"` Status actionstatusoptions.ActionStatusOptions `json:"status"` @@ -57,7 +57,7 @@ func (s *ExecutionResultCondition) UnmarshalJSON(data []byte) error { switch t { case "met": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/executionresultinput.go b/typedapi/types/executionresultinput.go index 6d81883068..451c2924cb 100644 --- a/typedapi/types/executionresultinput.go +++ b/typedapi/types/executionresultinput.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -29,7 +29,7 @@ import ( // ExecutionResultInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Execution.ts#L88-L92 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Execution.ts#L88-L92 type ExecutionResultInput struct { Payload map[string]json.RawMessage `json:"payload"` Status actionstatusoptions.ActionStatusOptions `json:"status"` diff --git a/typedapi/types/executionstate.go b/typedapi/types/executionstate.go index df3d242106..45e38bca09 100644 --- a/typedapi/types/executionstate.go +++ b/typedapi/types/executionstate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ExecutionState type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Action.ts#L120-L124 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Action.ts#L120-L124 type ExecutionState struct { Reason *string `json:"reason,omitempty"` Successful bool `json:"successful"` @@ -66,7 +66,7 @@ func (s *ExecutionState) UnmarshalJSON(data []byte) error { s.Reason = &o case "successful": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/executionthreadpool.go b/typedapi/types/executionthreadpool.go index 00ac5a6618..9fcb814420 100644 --- a/typedapi/types/executionthreadpool.go +++ b/typedapi/types/executionthreadpool.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ExecutionThreadPool type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Execution.ts#L94-L97 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Execution.ts#L94-L97 type ExecutionThreadPool struct { MaxSize int64 `json:"max_size"` QueueSize int64 `json:"queue_size"` @@ -53,7 +53,7 @@ func (s *ExecutionThreadPool) UnmarshalJSON(data []byte) error { switch t { case "max_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *ExecutionThreadPool) UnmarshalJSON(data []byte) error { } case "queue_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/existsquery.go b/typedapi/types/existsquery.go index 6baf782962..e1868725f5 100644 --- a/typedapi/types/existsquery.go +++ b/typedapi/types/existsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ExistsQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L36-L41 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L37-L42 type ExistsQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -60,7 +60,7 @@ func (s *ExistsQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/expandwildcards.go b/typedapi/types/expandwildcards.go index a9b1afe86d..6ce075c09c 100644 --- a/typedapi/types/expandwildcards.go +++ b/typedapi/types/expandwildcards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
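// The generated decoders in the files above now use `any` in place of `interface{}`.
// Since Go 1.18 `any` is a predeclared alias for `interface{}`, so this is a purely
// cosmetic, behaviour-preserving change. A minimal standalone sketch of the equivalence
// (illustrative only, not taken from the generated sources):
package main

import "fmt"

func main() {
	var a any = "payload"
	var i interface{} = a // identical types, assignable in both directions
	fmt.Println(a == i)   // true
}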
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ import ( // ExpandWildcards type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L217-L217 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L217-L217 type ExpandWildcards []expandwildcard.ExpandWildcard diff --git a/typedapi/types/explainanalyzetoken.go b/typedapi/types/explainanalyzetoken.go index 809a2581ac..112e80ede7 100644 --- a/typedapi/types/explainanalyzetoken.go +++ b/typedapi/types/explainanalyzetoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ExplainAnalyzeToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/analyze/types.ts#L52-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/analyze/types.ts#L52-L67 type ExplainAnalyzeToken struct { Bytes string `json:"bytes"` EndOffset int64 `json:"end_offset"` @@ -73,7 +73,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { s.Bytes = o case "end_offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { } case "keyword": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -102,7 +102,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { } case "position": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +117,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { } case "positionLength": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -132,7 +132,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { } case "start_offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -147,7 +147,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { } case "termFrequency": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -207,7 +207,7 @@ func (s *ExplainAnalyzeToken) UnmarshalJSON(data []byte) error { func (s ExplainAnalyzeToken) MarshalJSON() ([]byte, error) { type opt ExplainAnalyzeToken // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/explanation.go b/typedapi/types/explanation.go index 7f7bfbf16c..8ea3dfe02d 100644 --- a/typedapi/types/explanation.go +++ b/typedapi/types/explanation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
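// The hand-written UnmarshalJSON bodies above (ExplainAnalyzeToken, Explanation, and the
// other types touched in this diff) decode numeric fields into `any` first and then switch
// on the concrete type, presumably because the server may return such values either as
// JSON numbers or as quoted strings. A self-contained sketch of that pattern; the type and
// field names below are illustrative, not taken from the generated code:
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

type lenientOffset struct {
	EndOffset int64
}

func (s *lenientOffset) UnmarshalJSON(data []byte) error {
	var raw map[string]any
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch v := raw["end_offset"].(type) {
	case string: // e.g. {"end_offset":"42"}
		value, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return fmt.Errorf("%s | %w", "EndOffset", err)
		}
		s.EndOffset = value
	case float64: // e.g. {"end_offset":42}
		s.EndOffset = int64(v)
	}
	return nil
}

func main() {
	var quoted, plain lenientOffset
	_ = json.Unmarshal([]byte(`{"end_offset":"42"}`), &quoted)
	_ = json.Unmarshal([]byte(`{"end_offset":42}`), &plain)
	fmt.Println(quoted.EndOffset, plain.EndOffset) // 42 42
}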
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Explanation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/explain/types.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/explain/types.ts#L22-L26 type Explanation struct { Description string `json:"description"` Details []ExplanationDetail `json:"details"` @@ -71,7 +71,7 @@ func (s *Explanation) UnmarshalJSON(data []byte) error { } case "value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/explanationdetail.go b/typedapi/types/explanationdetail.go index 67f028c6d7..d00409432f 100644 --- a/typedapi/types/explanationdetail.go +++ b/typedapi/types/explanationdetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ExplanationDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/explain/types.ts#L28-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/explain/types.ts#L28-L32 type ExplanationDetail struct { Description string `json:"description"` Details []ExplanationDetail `json:"details,omitempty"` @@ -71,7 +71,7 @@ func (s *ExplanationDetail) UnmarshalJSON(data []byte) error { } case "value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/explorecontrols.go b/typedapi/types/explorecontrols.go index 582a2a9741..6a8a477472 100644 --- a/typedapi/types/explorecontrols.go +++ b/typedapi/types/explorecontrols.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ExploreControls type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/graph/_types/ExploreControls.ts#L24-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/graph/_types/ExploreControls.ts#L24-L49 type ExploreControls struct { // SampleDiversity To avoid the top-matching documents sample being dominated by a single source // of results, it is sometimes necessary to request diversity in the sample. 
@@ -79,7 +79,7 @@ func (s *ExploreControls) UnmarshalJSON(data []byte) error { case "sample_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,7 +99,7 @@ func (s *ExploreControls) UnmarshalJSON(data []byte) error { } case "use_significance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/extendedboundsdouble.go b/typedapi/types/extendedboundsdouble.go index 19a3c3bff1..fe33df2b74 100644 --- a/typedapi/types/extendedboundsdouble.go +++ b/typedapi/types/extendedboundsdouble.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,12 +31,12 @@ import ( // ExtendedBoundsdouble type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L489-L498 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L491-L500 type ExtendedBoundsdouble struct { // Max Maximum value for the bound. - Max Float64 `json:"max"` + Max *Float64 `json:"max,omitempty"` // Min Minimum value for the bound. - Min Float64 `json:"min"` + Min *Float64 `json:"min,omitempty"` } func (s *ExtendedBoundsdouble) UnmarshalJSON(data []byte) error { @@ -55,7 +55,7 @@ func (s *ExtendedBoundsdouble) UnmarshalJSON(data []byte) error { switch t { case "max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -64,14 +64,14 @@ func (s *ExtendedBoundsdouble) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Max", err) } f := Float64(value) - s.Max = f + s.Max = &f case float64: f := Float64(v) - s.Max = f + s.Max = &f } case "min": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,10 +80,10 @@ func (s *ExtendedBoundsdouble) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Min", err) } f := Float64(value) - s.Min = f + s.Min = &f case float64: f := Float64(v) - s.Min = f + s.Min = &f } } diff --git a/typedapi/types/extendedboundsfielddatemath.go b/typedapi/types/extendedboundsfielddatemath.go index 74e3ffbf5c..eab44c0a74 100644 --- a/typedapi/types/extendedboundsfielddatemath.go +++ b/typedapi/types/extendedboundsfielddatemath.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,12 +30,12 @@ import ( // ExtendedBoundsFieldDateMath type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L489-L498 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L491-L500 type ExtendedBoundsFieldDateMath struct { // Max Maximum value for the bound. - Max FieldDateMath `json:"max"` + Max FieldDateMath `json:"max,omitempty"` // Min Minimum value for the bound. 
- Min FieldDateMath `json:"min"` + Min FieldDateMath `json:"min,omitempty"` } func (s *ExtendedBoundsFieldDateMath) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/extendedmemorystats.go b/typedapi/types/extendedmemorystats.go index 910ca3b59b..9c3725a794 100644 --- a/typedapi/types/extendedmemorystats.go +++ b/typedapi/types/extendedmemorystats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ExtendedMemoryStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L622-L631 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L622-L631 type ExtendedMemoryStats struct { // AdjustedTotalInBytes If the amount of physical memory has been overridden using the // `es`.`total_memory_bytes` system property then this reports the overridden @@ -72,7 +72,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { switch t { case "adjusted_total_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { } case "free_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,7 +103,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case "free_percent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -130,7 +130,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { s.Resident = &o case "resident_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -157,7 +157,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { s.Share = &o case "share_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -172,7 +172,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { } case "total_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -199,7 +199,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { s.TotalVirtual = &o case "total_virtual_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -214,7 +214,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { } case "used_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -230,7 +230,7 @@ func (s *ExtendedMemoryStats) UnmarshalJSON(data []byte) error { case "used_percent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/extendedstatsaggregate.go b/typedapi/types/extendedstatsaggregate.go index 1b376e7e9b..b49171fb0b 100644 --- a/typedapi/types/extendedstatsaggregate.go +++ b/typedapi/types/extendedstatsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
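// With the regenerated code above, Max and Min on ExtendedBoundsdouble become optional
// (*Float64 with `omitempty`), so callers set a bound by taking the address of a Float64
// and leave nil any bound they want omitted. Hedged sketch of constructing the type; the
// import path is an assumption and is not shown in this diff:
package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	lower := types.Float64(0)
	upper := types.Float64(100)
	// Only the bounds that are set are serialized; a nil pointer is dropped by omitempty.
	bounds := types.ExtendedBoundsdouble{Min: &lower, Max: &upper}
	fmt.Println(*bounds.Min, *bounds.Max) // 0 100
}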
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,31 +31,31 @@ import ( // ExtendedStatsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L278-L296 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L278-L296 type ExtendedStatsAggregate struct { - Avg Float64 `json:"avg,omitempty"` + Avg *Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` Count int64 `json:"count"` - Max Float64 `json:"max,omitempty"` + Max *Float64 `json:"max,omitempty"` MaxAsString *string `json:"max_as_string,omitempty"` Meta Metadata `json:"meta,omitempty"` - Min Float64 `json:"min,omitempty"` + Min *Float64 `json:"min,omitempty"` MinAsString *string `json:"min_as_string,omitempty"` - StdDeviation Float64 `json:"std_deviation,omitempty"` + StdDeviation *Float64 `json:"std_deviation,omitempty"` StdDeviationAsString *string `json:"std_deviation_as_string,omitempty"` StdDeviationBounds *StandardDeviationBounds `json:"std_deviation_bounds,omitempty"` StdDeviationBoundsAsString *StandardDeviationBoundsAsString `json:"std_deviation_bounds_as_string,omitempty"` - StdDeviationPopulation Float64 `json:"std_deviation_population,omitempty"` - StdDeviationSampling Float64 `json:"std_deviation_sampling,omitempty"` + StdDeviationPopulation *Float64 `json:"std_deviation_population,omitempty"` + StdDeviationSampling *Float64 `json:"std_deviation_sampling,omitempty"` Sum Float64 `json:"sum"` SumAsString *string `json:"sum_as_string,omitempty"` - SumOfSquares Float64 `json:"sum_of_squares,omitempty"` + SumOfSquares *Float64 `json:"sum_of_squares,omitempty"` SumOfSquaresAsString *string `json:"sum_of_squares_as_string,omitempty"` - Variance Float64 `json:"variance,omitempty"` + Variance *Float64 `json:"variance,omitempty"` VarianceAsString *string `json:"variance_as_string,omitempty"` - VariancePopulation Float64 `json:"variance_population,omitempty"` + VariancePopulation *Float64 `json:"variance_population,omitempty"` VariancePopulationAsString *string `json:"variance_population_as_string,omitempty"` - VarianceSampling Float64 `json:"variance_sampling,omitempty"` + VarianceSampling *Float64 `json:"variance_sampling,omitempty"` VarianceSamplingAsString *string `json:"variance_sampling_as_string,omitempty"` } @@ -92,7 +92,7 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { s.AvgAsString = &o case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -183,7 +183,7 @@ func (s *ExtendedStatsAggregate) UnmarshalJSON(data []byte) error { } case "sum": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/extendedstatsaggregation.go b/typedapi/types/extendedstatsaggregation.go index 30158ed90a..830432b932 100644 --- a/typedapi/types/extendedstatsaggregation.go +++ b/typedapi/types/extendedstatsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ExtendedStatsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L101-L106 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L101-L106 type ExtendedStatsAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -118,7 +118,7 @@ func (s *ExtendedStatsAggregation) UnmarshalJSON(data []byte) error { } case "sigma": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/extendedstatsbucketaggregate.go b/typedapi/types/extendedstatsbucketaggregate.go index da93065103..7e862d05f1 100644 --- a/typedapi/types/extendedstatsbucketaggregate.go +++ b/typedapi/types/extendedstatsbucketaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,31 +31,31 @@ import ( // ExtendedStatsBucketAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L298-L299 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L298-L299 type ExtendedStatsBucketAggregate struct { - Avg Float64 `json:"avg,omitempty"` + Avg *Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` Count int64 `json:"count"` - Max Float64 `json:"max,omitempty"` + Max *Float64 `json:"max,omitempty"` MaxAsString *string `json:"max_as_string,omitempty"` Meta Metadata `json:"meta,omitempty"` - Min Float64 `json:"min,omitempty"` + Min *Float64 `json:"min,omitempty"` MinAsString *string `json:"min_as_string,omitempty"` - StdDeviation Float64 `json:"std_deviation,omitempty"` + StdDeviation *Float64 `json:"std_deviation,omitempty"` StdDeviationAsString *string `json:"std_deviation_as_string,omitempty"` StdDeviationBounds *StandardDeviationBounds `json:"std_deviation_bounds,omitempty"` StdDeviationBoundsAsString *StandardDeviationBoundsAsString `json:"std_deviation_bounds_as_string,omitempty"` - StdDeviationPopulation Float64 `json:"std_deviation_population,omitempty"` - StdDeviationSampling Float64 `json:"std_deviation_sampling,omitempty"` + StdDeviationPopulation *Float64 `json:"std_deviation_population,omitempty"` + StdDeviationSampling *Float64 `json:"std_deviation_sampling,omitempty"` Sum Float64 `json:"sum"` SumAsString *string `json:"sum_as_string,omitempty"` - SumOfSquares Float64 `json:"sum_of_squares,omitempty"` + SumOfSquares *Float64 `json:"sum_of_squares,omitempty"` SumOfSquaresAsString *string `json:"sum_of_squares_as_string,omitempty"` - Variance Float64 `json:"variance,omitempty"` + Variance *Float64 `json:"variance,omitempty"` VarianceAsString *string `json:"variance_as_string,omitempty"` - VariancePopulation Float64 
`json:"variance_population,omitempty"` + VariancePopulation *Float64 `json:"variance_population,omitempty"` VariancePopulationAsString *string `json:"variance_population_as_string,omitempty"` - VarianceSampling Float64 `json:"variance_sampling,omitempty"` + VarianceSampling *Float64 `json:"variance_sampling,omitempty"` VarianceSamplingAsString *string `json:"variance_sampling_as_string,omitempty"` } @@ -92,7 +92,7 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { s.AvgAsString = &o case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -183,7 +183,7 @@ func (s *ExtendedStatsBucketAggregate) UnmarshalJSON(data []byte) error { } case "sum": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/extendedstatsbucketaggregation.go b/typedapi/types/extendedstatsbucketaggregation.go index 1a08f69dfe..7f0f11e046 100644 --- a/typedapi/types/extendedstatsbucketaggregation.go +++ b/typedapi/types/extendedstatsbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // ExtendedStatsBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L198-L203 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L198-L203 type ExtendedStatsBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type ExtendedStatsBucketAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // Sigma The number of standard deviations above/below the mean to display. Sigma *Float64 `json:"sigma,omitempty"` } @@ -86,25 +84,8 @@ func (s *ExtendedStatsBucketAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "sigma": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/failprocessor.go b/typedapi/types/failprocessor.go index 8777b8daa8..4eb06941d1 100644 --- a/typedapi/types/failprocessor.go +++ b/typedapi/types/failprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FailProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L648-L654 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L648-L654 type FailProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -90,7 +90,7 @@ func (s *FailProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/feature.go b/typedapi/types/feature.go index 9aa17233b9..fa0ca41ae8 100644 --- a/typedapi/types/feature.go +++ b/typedapi/types/feature.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Feature type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/features/_types/Feature.ts#L20-L23 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/features/_types/Feature.ts#L20-L23 type Feature struct { Description string `json:"description"` Name string `json:"name"` diff --git a/typedapi/types/features.go b/typedapi/types/features.go index 6b5dc640f2..6dd3e232bf 100644 --- a/typedapi/types/features.go +++ b/typedapi/types/features.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ import ( // Features type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get/IndicesGetRequest.ts#L95-L95 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get/IndicesGetRequest.ts#L95-L95 type Features []feature.Feature diff --git a/typedapi/types/featuretoggle.go b/typedapi/types/featuretoggle.go index ad784f0c00..8134f15539 100644 --- a/typedapi/types/featuretoggle.go +++ b/typedapi/types/featuretoggle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FeatureToggle type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L40-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L40-L42 type FeatureToggle struct { Enabled bool `json:"enabled"` } @@ -52,7 +52,7 @@ func (s *FeatureToggle) UnmarshalJSON(data []byte) error { switch t { case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fetchprofile.go b/typedapi/types/fetchprofile.go index 8da9ce55b8..19228f8268 100644 --- a/typedapi/types/fetchprofile.go +++ b/typedapi/types/fetchprofile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FetchProfile type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L139-L146 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L139-L146 type FetchProfile struct { Breakdown FetchProfileBreakdown `json:"breakdown"` Children []FetchProfile `json:"children,omitempty"` diff --git a/typedapi/types/fetchprofilebreakdown.go b/typedapi/types/fetchprofilebreakdown.go index 2e0f304669..ee7fc8b942 100644 --- a/typedapi/types/fetchprofilebreakdown.go +++ b/typedapi/types/fetchprofilebreakdown.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FetchProfileBreakdown type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L148-L157 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L148-L157 type FetchProfileBreakdown struct { LoadSource *int `json:"load_source,omitempty"` LoadSourceCount *int `json:"load_source_count,omitempty"` @@ -60,7 +60,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case "load_source": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case "load_source_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case "load_stored_fields": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +108,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case "load_stored_fields_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -124,7 +124,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case "next_reader": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -140,7 +140,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case "next_reader_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -156,7 +156,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case "process": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -172,7 +172,7 @@ func (s *FetchProfileBreakdown) UnmarshalJSON(data []byte) error { case "process_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fetchprofiledebug.go b/typedapi/types/fetchprofiledebug.go index be304f2c4d..37987d27db 100644 --- a/typedapi/types/fetchprofiledebug.go +++ b/typedapi/types/fetchprofiledebug.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FetchProfileDebug type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L159-L162 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L159-L162 type FetchProfileDebug struct { FastPath *int `json:"fast_path,omitempty"` StoredFields []string `json:"stored_fields,omitempty"` @@ -54,7 +54,7 @@ func (s *FetchProfileDebug) UnmarshalJSON(data []byte) error { case "fast_path": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fieldaliasproperty.go b/typedapi/types/fieldaliasproperty.go index c2420e4233..1bb1d02c2c 100644 --- a/typedapi/types/fieldaliasproperty.go +++ b/typedapi/types/fieldaliasproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // FieldAliasProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/specialized.ts#L49-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/specialized.ts#L55-L58 type FieldAliasProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -72,7 +72,7 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -93,7 +93,7 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -183,12 +183,6 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -207,6 +201,18 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -363,6 +369,12 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -374,7 +386,7 @@ func (s *FieldAliasProperty) 
UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -408,7 +420,7 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -429,7 +441,7 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -519,12 +531,6 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -543,6 +549,18 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -699,6 +717,12 @@ func (s *FieldAliasProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { diff --git a/typedapi/types/fieldandformat.go b/typedapi/types/fieldandformat.go index 5eba0227e3..2cebea171b 100644 --- a/typedapi/types/fieldandformat.go +++ b/typedapi/types/fieldandformat.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FieldAndFormat type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/abstractions.ts#L505-L519 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/abstractions.ts#L525-L539 type FieldAndFormat struct { // Field Wildcard pattern. The request returns values for field names matching this // pattern. @@ -83,7 +83,7 @@ func (s *FieldAndFormat) UnmarshalJSON(data []byte) error { s.Format = &o case "include_unmapped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fieldcapability.go b/typedapi/types/fieldcapability.go index 17530037f4..2683ecb45e 100644 --- a/typedapi/types/fieldcapability.go +++ b/typedapi/types/fieldcapability.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // FieldCapability type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/field_caps/types.ts#L23-L81 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/field_caps/types.ts#L23-L81 type FieldCapability struct { // Aggregatable Whether this field can be aggregated on all indices. Aggregatable bool `json:"aggregatable"` @@ -85,7 +85,7 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { switch t { case "aggregatable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -120,7 +120,7 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { } case "metadata_field": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -176,7 +176,7 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { } case "searchable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -190,7 +190,7 @@ func (s *FieldCapability) UnmarshalJSON(data []byte) error { } case "time_series_dimension": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fieldcollapse.go b/typedapi/types/fieldcollapse.go index 73cf7bf3b4..34066fa051 100644 --- a/typedapi/types/fieldcollapse.go +++ b/typedapi/types/fieldcollapse.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FieldCollapse type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/FieldCollapse.ts#L24-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/FieldCollapse.ts#L24-L38 type FieldCollapse struct { Collapse *FieldCollapse `json:"collapse,omitempty"` // Field The field to collapse the result set on @@ -86,7 +86,7 @@ func (s *FieldCollapse) UnmarshalJSON(data []byte) error { case "max_concurrent_group_searches": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fielddatafrequencyfilter.go b/typedapi/types/fielddatafrequencyfilter.go index 79e85a389e..9d4778291e 100644 --- a/typedapi/types/fielddatafrequencyfilter.go +++ b/typedapi/types/fielddatafrequencyfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FielddataFrequencyFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/FielddataFrequencyFilter.ts#L22-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/FielddataFrequencyFilter.ts#L22-L26 type FielddataFrequencyFilter struct { Max Float64 `json:"max"` Min Float64 `json:"min"` @@ -54,7 +54,7 @@ func (s *FielddataFrequencyFilter) UnmarshalJSON(data []byte) error { switch t { case "max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *FielddataFrequencyFilter) UnmarshalJSON(data []byte) error { } case "min": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *FielddataFrequencyFilter) UnmarshalJSON(data []byte) error { case "min_segment_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fielddatarecord.go b/typedapi/types/fielddatarecord.go index adfada8620..af437d1812 100644 --- a/typedapi/types/fielddatarecord.go +++ b/typedapi/types/fielddatarecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FielddataRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/fielddata/types.ts#L20-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/fielddata/types.ts#L20-L48 type FielddataRecord struct { // Field field name Field *string `json:"field,omitempty"` diff --git a/typedapi/types/fielddatastats.go b/typedapi/types/fielddatastats.go index e808e63ce5..6ce21dc743 100644 --- a/typedapi/types/fielddatastats.go +++ b/typedapi/types/fielddatastats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FielddataStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L111-L116 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L111-L116 type FielddataStats struct { Evictions *int64 `json:"evictions,omitempty"` Fields map[string]FieldMemoryUsage `json:"fields,omitempty"` @@ -55,7 +55,7 @@ func (s *FielddataStats) UnmarshalJSON(data []byte) error { switch t { case "evictions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *FielddataStats) UnmarshalJSON(data []byte) error { } case "memory_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fielddatemath.go b/typedapi/types/fielddatemath.go index 3b9b79601c..a19f9bd5c7 100644 --- a/typedapi/types/fielddatemath.go +++ b/typedapi/types/fielddatemath.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // Float64 // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L296-L303 -type FieldDateMath interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L298-L305 +type FieldDateMath any diff --git a/typedapi/types/fieldlookup.go b/typedapi/types/fieldlookup.go index 7501005bd4..04d8feeb04 100644 --- a/typedapi/types/fieldlookup.go +++ b/typedapi/types/fieldlookup.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // FieldLookup type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/abstractions.ts#L409-L426 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/abstractions.ts#L429-L446 type FieldLookup struct { // Id `id` of the document. Id string `json:"id"` diff --git a/typedapi/types/fieldmapping.go b/typedapi/types/fieldmapping.go index e7befb2c46..bd5cd55570 100644 --- a/typedapi/types/fieldmapping.go +++ b/typedapi/types/fieldmapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FieldMapping type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/meta-fields.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/meta-fields.ts#L24-L27 type FieldMapping struct { FullName string `json:"full_name"` Mapping map[string]Property `json:"mapping"` @@ -71,7 +71,7 @@ func (s *FieldMapping) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -92,7 +92,7 @@ func (s *FieldMapping) UnmarshalJSON(data []byte) error { return err } s.Mapping[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -182,12 +182,6 @@ func (s *FieldMapping) UnmarshalJSON(data []byte) error { return err } s.Mapping[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Mapping[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -206,6 +200,18 @@ func (s *FieldMapping) UnmarshalJSON(data []byte) error { return err } s.Mapping[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Mapping[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Mapping[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -362,6 +368,12 @@ func (s *FieldMapping) UnmarshalJSON(data []byte) error { return err } s.Mapping[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Mapping[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { diff --git a/typedapi/types/fieldmemoryusage.go b/typedapi/types/fieldmemoryusage.go index 9f9f1dac95..7cd0116a5a 100644 --- a/typedapi/types/fieldmemoryusage.go +++ b/typedapi/types/fieldmemoryusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FieldMemoryUsage type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L118-L121 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L118-L121 type FieldMemoryUsage struct { MemorySize ByteSize `json:"memory_size,omitempty"` MemorySizeInBytes int64 `json:"memory_size_in_bytes"` @@ -58,7 +58,7 @@ func (s *FieldMemoryUsage) UnmarshalJSON(data []byte) error { } case "memory_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fieldmetric.go b/typedapi/types/fieldmetric.go index 4c193991f0..9800340d63 100644 --- a/typedapi/types/fieldmetric.go +++ b/typedapi/types/fieldmetric.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // FieldMetric type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/_types/Metric.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/_types/Metric.ts#L30-L35 type FieldMetric struct { // Field The field to collect metrics for. This must be a numeric of some kind. Field string `json:"field"` diff --git a/typedapi/types/fieldnamesfield.go b/typedapi/types/fieldnamesfield.go index 88eab14fa1..b081477e0a 100644 --- a/typedapi/types/fieldnamesfield.go +++ b/typedapi/types/fieldnamesfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FieldNamesField type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/meta-fields.ts#L42-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/meta-fields.ts#L42-L44 type FieldNamesField struct { Enabled bool `json:"enabled"` } @@ -52,7 +52,7 @@ func (s *FieldNamesField) UnmarshalJSON(data []byte) error { switch t { case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fieldrule.go b/typedapi/types/fieldrule.go index fde2d446be..3877d6cdc9 100644 --- a/typedapi/types/fieldrule.go +++ b/typedapi/types/fieldrule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // FieldRule type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/RoleMappingRule.ts#L36-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/RoleMappingRule.ts#L36-L44 type FieldRule struct { Dn []string `json:"dn,omitempty"` Groups []string `json:"groups,omitempty"` diff --git a/typedapi/types/fields.go b/typedapi/types/fields.go index bc1ec825be..337c6463c5 100644 --- a/typedapi/types/fields.go +++ b/typedapi/types/fields.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Fields type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L140-L140 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L140-L140 type Fields []string diff --git a/typedapi/types/fieldsecurity.go b/typedapi/types/fieldsecurity.go index 5a532af8af..cd5ff203c3 100644 --- a/typedapi/types/fieldsecurity.go +++ b/typedapi/types/fieldsecurity.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // FieldSecurity type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/FieldSecurity.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/FieldSecurity.ts#L22-L25 type FieldSecurity struct { Except []string `json:"except,omitempty"` Grant []string `json:"grant,omitempty"` diff --git a/typedapi/types/fieldsizeusage.go b/typedapi/types/fieldsizeusage.go index 762b1094e5..fd244bacd7 100644 --- a/typedapi/types/fieldsizeusage.go +++ b/typedapi/types/fieldsizeusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FieldSizeUsage type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L92-L95 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L92-L95 type FieldSizeUsage struct { Size ByteSize `json:"size,omitempty"` SizeInBytes int64 `json:"size_in_bytes"` @@ -58,7 +58,7 @@ func (s *FieldSizeUsage) UnmarshalJSON(data []byte) error { } case "size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fieldsort.go b/typedapi/types/fieldsort.go index 2fffa14917..5d35c5169a 100644 --- a/typedapi/types/fieldsort.go +++ b/typedapi/types/fieldsort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -36,7 +36,7 @@ import ( // FieldSort type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/sort.ts#L44-L53 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/sort.ts#L44-L53 type FieldSort struct { Format *string `json:"format,omitempty"` Missing Missing `json:"missing,omitempty"` diff --git a/typedapi/types/fieldstat.go b/typedapi/types/fieldstat.go index f0081c93bb..b1c7a0d168 100644 --- a/typedapi/types/fieldstat.go +++ b/typedapi/types/fieldstat.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FieldStat type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/text_structure/find_structure/types.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/text_structure/find_structure/types.ts#L23-L33 type FieldStat struct { Cardinality int `json:"cardinality"` Count int `json:"count"` @@ -61,7 +61,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case "cardinality": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -77,7 +77,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +117,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case "max_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -133,7 +133,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case "mean_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -149,7 +149,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case "median_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -165,7 +165,7 @@ func (s *FieldStat) UnmarshalJSON(data []byte) error { case "min_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fieldstatistics.go b/typedapi/types/fieldstatistics.go index 97c4af1afc..4b28753b36 100644 --- a/typedapi/types/fieldstatistics.go +++ b/typedapi/types/fieldstatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FieldStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/termvectors/types.ts#L28-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/termvectors/types.ts#L28-L32 type FieldStatistics struct { DocCount int `json:"doc_count"` SumDocFreq int64 `json:"sum_doc_freq"` @@ -55,7 +55,7 @@ func (s *FieldStatistics) UnmarshalJSON(data []byte) error { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *FieldStatistics) UnmarshalJSON(data []byte) error { } case "sum_doc_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *FieldStatistics) UnmarshalJSON(data []byte) error { } case "sum_ttf": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fieldsuggester.go b/typedapi/types/fieldsuggester.go index 3e955faa30..fef15cb59e 100644 --- a/typedapi/types/fieldsuggester.go +++ b/typedapi/types/fieldsuggester.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FieldSuggester type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L106-L139 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L109-L142 type FieldSuggester struct { // Completion Provides auto-complete/search-as-you-type functionality. Completion *CompletionSuggester `json:"completion,omitempty"` diff --git a/typedapi/types/fieldsummary.go b/typedapi/types/fieldsummary.go index 2b119048bb..90640cf426 100644 --- a/typedapi/types/fieldsummary.go +++ b/typedapi/types/fieldsummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // FieldSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L54-L63 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L57-L66 type FieldSummary struct { Any uint `json:"any"` DocValues uint `json:"doc_values"` diff --git a/typedapi/types/fieldsusagebody.go b/typedapi/types/fieldsusagebody.go index 5c58ec4fe8..50a7ad406e 100644 --- a/typedapi/types/fieldsusagebody.go +++ b/typedapi/types/fieldsusagebody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -27,7 +27,7 @@ import ( // FieldsUsageBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L32-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L32-L39 type FieldsUsageBody struct { FieldsUsageBody map[string]UsageStatsIndex `json:"-"` Shards_ ShardStatistics `json:"_shards"` @@ -37,7 +37,7 @@ type FieldsUsageBody struct { func (s FieldsUsageBody) MarshalJSON() ([]byte, error) { type opt FieldsUsageBody // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/fieldtypes.go b/typedapi/types/fieldtypes.go index b00d8c9106..58c5380a2a 100644 --- a/typedapi/types/fieldtypes.go +++ b/typedapi/types/fieldtypes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FieldTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L136-L167 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L136-L167 type FieldTypes struct { // Count The number of occurrences of the field type in selected nodes. Count int `json:"count"` @@ -69,7 +69,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { case "index_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { } case "indexed_vector_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { } case "indexed_vector_dim_max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -130,7 +130,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { } case "indexed_vector_dim_min": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -151,7 +151,7 @@ func (s *FieldTypes) UnmarshalJSON(data []byte) error { case "script_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fieldtypesmappings.go b/typedapi/types/fieldtypesmappings.go index 252d40f4f0..e2844106b7 100644 --- a/typedapi/types/fieldtypesmappings.go +++ b/typedapi/types/fieldtypesmappings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FieldTypesMappings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L109-L134 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L109-L134 type FieldTypesMappings struct { // FieldTypes Contains statistics about field data types used in selected nodes. 
FieldTypes []FieldTypes `json:"field_types"` @@ -75,7 +75,7 @@ func (s *FieldTypesMappings) UnmarshalJSON(data []byte) error { case "total_deduplicated_field_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *FieldTypesMappings) UnmarshalJSON(data []byte) error { } case "total_deduplicated_mapping_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,7 +111,7 @@ func (s *FieldTypesMappings) UnmarshalJSON(data []byte) error { case "total_field_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fieldvalue.go b/typedapi/types/fieldvalue.go index f8dc6f3b27..85b7ac882a 100644 --- a/typedapi/types/fieldvalue.go +++ b/typedapi/types/fieldvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -29,5 +29,5 @@ package types // nil // json.RawMessage // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L25-L37 -type FieldValue interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L25-L37 +type FieldValue any diff --git a/typedapi/types/fieldvaluefactorscorefunction.go b/typedapi/types/fieldvaluefactorscorefunction.go index 50a77786e6..e8fe0189e5 100644 --- a/typedapi/types/fieldvaluefactorscorefunction.go +++ b/typedapi/types/fieldvaluefactorscorefunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // FieldValueFactorScoreFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L132-L151 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L133-L152 type FieldValueFactorScoreFunction struct { // Factor Optional factor to multiply the field value with. Factor *Float64 `json:"factor,omitempty"` @@ -63,7 +63,7 @@ func (s *FieldValueFactorScoreFunction) UnmarshalJSON(data []byte) error { switch t { case "factor": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,7 +84,7 @@ func (s *FieldValueFactorScoreFunction) UnmarshalJSON(data []byte) error { } case "missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/filecountsnapshotstats.go b/typedapi/types/filecountsnapshotstats.go index b986118220..dcbb0f6c69 100644 --- a/typedapi/types/filecountsnapshotstats.go +++ b/typedapi/types/filecountsnapshotstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FileCountSnapshotStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/FileCountSnapshotStats.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/FileCountSnapshotStats.ts#L22-L25 type FileCountSnapshotStats struct { FileCount int `json:"file_count"` SizeInBytes int64 `json:"size_in_bytes"` @@ -54,7 +54,7 @@ func (s *FileCountSnapshotStats) UnmarshalJSON(data []byte) error { case "file_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -69,7 +69,7 @@ func (s *FileCountSnapshotStats) UnmarshalJSON(data []byte) error { } case "size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/filedetails.go b/typedapi/types/filedetails.go index e7ed18d997..47791ae314 100644 --- a/typedapi/types/filedetails.go +++ b/typedapi/types/filedetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FileDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/recovery/types.ts#L50-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/recovery/types.ts#L50-L54 type FileDetails struct { Length int64 `json:"length"` Name string `json:"name"` @@ -54,7 +54,7 @@ func (s *FileDetails) UnmarshalJSON(data []byte) error { switch t { case "length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -81,7 +81,7 @@ func (s *FileDetails) UnmarshalJSON(data []byte) error { s.Name = o case "recovered": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/filesystem.go b/typedapi/types/filesystem.go index 2984471387..0e47c7b2f5 100644 --- a/typedapi/types/filesystem.go +++ b/typedapi/types/filesystem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FileSystem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L698-L716 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L698-L716 type FileSystem struct { // Data List of all file stores. 
Data []DataPathStats `json:"data,omitempty"` @@ -70,7 +70,7 @@ func (s *FileSystem) UnmarshalJSON(data []byte) error { } case "timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/filesystemtotal.go b/typedapi/types/filesystemtotal.go index 902eb09cb5..450bc0e248 100644 --- a/typedapi/types/filesystemtotal.go +++ b/typedapi/types/filesystemtotal.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FileSystemTotal type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L757-L786 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L757-L786 type FileSystemTotal struct { // Available Total disk space available to this Java virtual machine on all file stores. // Depending on OS or process level restrictions, this might appear less than @@ -84,7 +84,7 @@ func (s *FileSystemTotal) UnmarshalJSON(data []byte) error { s.Available = &o case "available_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,7 +111,7 @@ func (s *FileSystemTotal) UnmarshalJSON(data []byte) error { s.Free = &o case "free_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *FileSystemTotal) UnmarshalJSON(data []byte) error { s.Total = &o case "total_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fillmaskinferenceoptions.go b/typedapi/types/fillmaskinferenceoptions.go index acdee663c5..cee7d27ce9 100644 --- a/typedapi/types/fillmaskinferenceoptions.go +++ b/typedapi/types/fillmaskinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FillMaskInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L266-L280 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L266-L280 type FillMaskInferenceOptions struct { // MaskToken The string/token which will be removed from incoming documents and replaced // with the inference prediction(s). @@ -81,7 +81,7 @@ func (s *FillMaskInferenceOptions) UnmarshalJSON(data []byte) error { case "num_top_classes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fillmaskinferenceupdateoptions.go b/typedapi/types/fillmaskinferenceupdateoptions.go index 163ba7f238..9535379b69 100644 --- a/typedapi/types/fillmaskinferenceupdateoptions.go +++ b/typedapi/types/fillmaskinferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FillMaskInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L411-L418 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L411-L418 type FillMaskInferenceUpdateOptions struct { // NumTopClasses Specifies the number of top class predictions to return. Defaults to 0. NumTopClasses *int `json:"num_top_classes,omitempty"` @@ -59,7 +59,7 @@ func (s *FillMaskInferenceUpdateOptions) UnmarshalJSON(data []byte) error { case "num_top_classes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/filteraggregate.go b/typedapi/types/filteraggregate.go index abc5b2d6d0..b36fb4d5ff 100644 --- a/typedapi/types/filteraggregate.go +++ b/typedapi/types/filteraggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // FilterAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L495-L496 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L499-L500 type FilterAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *FilterAggregate) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *FilterAggregate) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *FilterAggregate) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *FilterAggregate) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *FilterAggregate) UnmarshalJSON(data []byte) error { func (s FilterAggregate) MarshalJSON() ([]byte, error) { type opt FilterAggregate // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/filterref.go 
b/typedapi/types/filterref.go index d84bcb9efd..7aafa3b93c 100644 --- a/typedapi/types/filterref.go +++ b/typedapi/types/filterref.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // FilterRef type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Filter.ts#L31-L41 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Filter.ts#L31-L41 type FilterRef struct { // FilterId The identifier for the filter. FilterId string `json:"filter_id"` diff --git a/typedapi/types/filtersaggregate.go b/typedapi/types/filtersaggregate.go index f2d0dfae81..5d7f0dc6ac 100644 --- a/typedapi/types/filtersaggregate.go +++ b/typedapi/types/filtersaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // FiltersAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L568-L569 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L572-L573 type FiltersAggregate struct { Buckets BucketsFiltersBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/filtersaggregation.go b/typedapi/types/filtersaggregation.go index 3b9d345f9e..07c27c7031 100644 --- a/typedapi/types/filtersaggregation.go +++ b/typedapi/types/filtersaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,15 +31,13 @@ import ( // FiltersAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L358-L378 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L360-L380 type FiltersAggregation struct { // Filters Collection of queries from which to build buckets. Filters BucketsQuery `json:"filters,omitempty"` // Keyed By default, the named filters aggregation returns the buckets as an object. // Set to `false` to return the buckets as an array of objects. - Keyed *bool `json:"keyed,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Keyed *bool `json:"keyed,omitempty"` // OtherBucket Set to `true` to add a bucket to the response which will contain all // documents that do not match any of the given filters. 
OtherBucket *bool `json:"other_bucket,omitempty"` @@ -84,7 +82,7 @@ func (s *FiltersAggregation) UnmarshalJSON(data []byte) error { } case "keyed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -97,25 +95,8 @@ func (s *FiltersAggregation) UnmarshalJSON(data []byte) error { s.Keyed = &v } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "other_bucket": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/filtersbucket.go b/typedapi/types/filtersbucket.go index 00ef949c5a..6bc4853343 100644 --- a/typedapi/types/filtersbucket.go +++ b/typedapi/types/filtersbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // FiltersBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L571-L571 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L575-L575 type FiltersBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -54,7 +54,7 @@ func (s *FiltersBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -513,7 +513,7 @@ func (s *FiltersBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -563,7 +563,7 @@ func (s *FiltersBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -573,7 +573,7 @@ func (s *FiltersBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -590,7 +590,7 @@ func (s *FiltersBucket) UnmarshalJSON(data []byte) error { func (s FiltersBucket) MarshalJSON() ([]byte, error) { type opt FiltersBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/fingerprintanalyzer.go b/typedapi/types/fingerprintanalyzer.go index f5fd469d47..20b6ef9e7b 100644 --- a/typedapi/types/fingerprintanalyzer.go +++ b/typedapi/types/fingerprintanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
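Worth pausing on the aggregate decoders above: the case label changes from "box_plot" to "boxplot" because the typed API unwraps typed_keys-style response keys of the form "<kind>#<name>" and dispatches on the kind prefix, storing the decoded value under the plain name (the `elems[1]` visible in these hunks). With the old label, boxplot results would have fallen through to the generic `map[string]any` branch. A minimal standalone sketch of that dispatch, using a hypothetical `boxPlotAggregate` stand-in rather than the generated types:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// boxPlotAggregate is a stand-in for the generated BoxPlotAggregate type.
type boxPlotAggregate struct {
	Min float64 `json:"min"`
	Max float64 `json:"max"`
}

// decodeTypedKey splits a typed_keys response key such as
// "boxplot#load_time" into the aggregate kind and the user-given name,
// then decodes the raw JSON into the matching concrete type.
func decodeTypedKey(key string, raw json.RawMessage, out map[string]any) error {
	elems := strings.Split(key, "#")
	if len(elems) != 2 {
		return fmt.Errorf("unexpected aggregation key %q", key)
	}
	switch elems[0] {
	case "boxplot": // the value Elasticsearch actually sends; "box_plot" would never match
		var o boxPlotAggregate
		if err := json.Unmarshal(raw, &o); err != nil {
			return fmt.Errorf("Aggregations | %w", err)
		}
		out[elems[1]] = o
	default:
		// Unknown kinds land in a generic map, as in the generated fallback.
		var o map[string]any
		if err := json.Unmarshal(raw, &o); err != nil {
			return fmt.Errorf("Aggregations | %w", err)
		}
		out[elems[1]] = o
	}
	return nil
}

func main() {
	aggs := map[string]any{}
	raw := json.RawMessage(`{"min": 1, "max": 9}`)
	if err := decodeTypedKey("boxplot#load_time", raw, aggs); err != nil {
		panic(err)
	}
	fmt.Printf("%#v\n", aggs["load_time"]) // main.boxPlotAggregate{Min:1, Max:9}
}
```

The same split-and-dispatch shape applies to every aggregate kind in the generated switches; only the "boxplot" case is shown here.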
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FingerprintAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L37-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L37-L45 type FingerprintAnalyzer struct { MaxOutputSize int `json:"max_output_size"` PreserveOriginal bool `json:"preserve_original"` @@ -59,7 +59,7 @@ func (s *FingerprintAnalyzer) UnmarshalJSON(data []byte) error { case "max_output_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -74,7 +74,7 @@ func (s *FingerprintAnalyzer) UnmarshalJSON(data []byte) error { } case "preserve_original": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fingerprinttokenfilter.go b/typedapi/types/fingerprinttokenfilter.go index 474d3cbe22..65962e215c 100644 --- a/typedapi/types/fingerprinttokenfilter.go +++ b/typedapi/types/fingerprinttokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FingerprintTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L194-L198 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L196-L200 type FingerprintTokenFilter struct { MaxOutputSize *int `json:"max_output_size,omitempty"` Separator *string `json:"separator,omitempty"` @@ -56,7 +56,7 @@ func (s *FingerprintTokenFilter) UnmarshalJSON(data []byte) error { case "max_output_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/flattened.go b/typedapi/types/flattened.go index 9d9647c5fc..5c1bbd3d30 100644 --- a/typedapi/types/flattened.go +++ b/typedapi/types/flattened.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Flattened type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L356-L358 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L356-L358 type Flattened struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -54,7 +54,7 @@ func (s *Flattened) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *Flattened) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *Flattened) UnmarshalJSON(data []byte) error { case "field_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/flattenedproperty.go b/typedapi/types/flattenedproperty.go index f8489651a9..4e1f38d385 100644 --- a/typedapi/types/flattenedproperty.go +++ b/typedapi/types/flattenedproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // FlattenedProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/complex.ts#L26-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/complex.ts#L26-L37 type FlattenedProperty struct { Boost *Float64 `json:"boost,omitempty"` DepthLimit *int `json:"depth_limit,omitempty"` @@ -70,7 +70,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case "depth_limit": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -102,7 +102,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -121,7 +121,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { } case "eager_global_ordinals": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -141,7 +141,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -162,7 +162,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -252,12 +252,6 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := 
localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -276,6 +270,18 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -432,6 +438,12 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -443,7 +455,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -458,7 +470,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -503,7 +515,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -524,7 +536,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -614,12 +626,6 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -638,6 +644,18 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -794,6 +812,12 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -816,7 +840,7 @@ func (s *FlattenedProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "split_queries_on_whitespace": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/floatnumberproperty.go b/typedapi/types/floatnumberproperty.go index afe8bea64d..13541181f3 100644 --- 
a/typedapi/types/floatnumberproperty.go +++ b/typedapi/types/floatnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // FloatNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L134-L137 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L142-L145 type FloatNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -79,7 +79,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -171,7 +171,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -261,12 +261,6 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -285,6 +279,18 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -441,6 +447,12 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -452,7 +464,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -467,7 +479,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { } case 
"ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -481,7 +493,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -503,7 +515,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { } case "null_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -530,7 +542,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -551,7 +563,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -641,12 +653,6 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -665,6 +671,18 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -821,6 +839,12 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -879,7 +903,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -893,7 +917,7 @@ func (s *FloatNumberProperty) UnmarshalJSON(data []byte) error { } case "time_series_dimension": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/floatrangeproperty.go b/typedapi/types/floatrangeproperty.go index 35e0bfe375..53b13a3ee4 100644 --- a/typedapi/types/floatrangeproperty.go +++ b/typedapi/types/floatrangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // FloatRangeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/range.ts#L38-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/range.ts#L38-L40 type FloatRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -67,7 +67,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -159,7 +159,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -249,12 +249,6 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -273,6 +267,18 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -429,6 +435,12 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -440,7 +452,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -455,7 +467,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -483,7 +495,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -504,7 +516,7 @@ func (s *FloatRangeProperty) 
UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -594,12 +606,6 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -618,6 +624,18 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -774,6 +792,12 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -796,7 +820,7 @@ func (s *FloatRangeProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/flushstats.go b/typedapi/types/flushstats.go index 48c514de19..ac5a343b16 100644 --- a/typedapi/types/flushstats.go +++ b/typedapi/types/flushstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FlushStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L123-L128 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L123-L128 type FlushStats struct { Periodic int64 `json:"periodic"` Total int64 `json:"total"` @@ -55,7 +55,7 @@ func (s *FlushStats) UnmarshalJSON(data []byte) error { switch t { case "periodic": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *FlushStats) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/followerindex.go b/typedapi/types/followerindex.go index 700bf2a1e0..ca6679b5c7 100644 --- a/typedapi/types/followerindex.go +++ b/typedapi/types/followerindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // FollowerIndex type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/follow_info/types.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/follow_info/types.ts#L22-L28 type FollowerIndex struct { FollowerIndex string `json:"follower_index"` LeaderIndex string `json:"leader_index"` diff --git a/typedapi/types/followerindexparameters.go b/typedapi/types/followerindexparameters.go index a452c8f741..71e451240f 100644 --- a/typedapi/types/followerindexparameters.go +++ b/typedapi/types/followerindexparameters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FollowerIndexParameters type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/follow_info/types.ts#L38-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/follow_info/types.ts#L38-L49 type FollowerIndexParameters struct { MaxOutstandingReadRequests int `json:"max_outstanding_read_requests"` MaxOutstandingWriteRequests int `json:"max_outstanding_write_requests"` @@ -62,7 +62,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case "max_outstanding_read_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -78,7 +78,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case "max_outstanding_write_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -94,7 +94,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case "max_read_request_operation_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -127,7 +127,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case "max_write_buffer_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -155,7 +155,7 @@ func (s *FollowerIndexParameters) UnmarshalJSON(data []byte) error { case "max_write_request_operation_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/followindexstats.go b/typedapi/types/followindexstats.go index 8731dae773..d82342f440 100644 --- a/typedapi/types/followindexstats.go +++ b/typedapi/types/followindexstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // FollowIndexStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/_types/FollowIndexStats.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/_types/FollowIndexStats.ts#L30-L33 type FollowIndexStats struct { Index string `json:"index"` Shards []CcrShardStats `json:"shards"` diff --git a/typedapi/types/followstats.go b/typedapi/types/followstats.go index 2396601612..cccb2dd47e 100644 --- a/typedapi/types/followstats.go +++ b/typedapi/types/followstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // FollowStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/stats/types.ts.ts#L41-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/stats/types.ts.ts#L41-L43 type FollowStats struct { Indices []FollowIndexStats `json:"indices"` } diff --git a/typedapi/types/forcemergeconfiguration.go b/typedapi/types/forcemergeconfiguration.go index 305ed48e1a..3db5890d56 100644 --- a/typedapi/types/forcemergeconfiguration.go +++ b/typedapi/types/forcemergeconfiguration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ForceMergeConfiguration type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/_types/Phase.ts#L56-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/_types/Phase.ts#L56-L58 type ForceMergeConfiguration struct { MaxNumSegments int `json:"max_num_segments"` } @@ -53,7 +53,7 @@ func (s *ForceMergeConfiguration) UnmarshalJSON(data []byte) error { case "max_num_segments": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/forcemergeresponsebody.go b/typedapi/types/forcemergeresponsebody.go index 5d8b099701..9e8ac5678a 100644 --- a/typedapi/types/forcemergeresponsebody.go +++ b/typedapi/types/forcemergeresponsebody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ForceMergeResponseBody type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/forcemerge/_types/response.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/forcemerge/_types/response.ts#L22-L28 type ForceMergeResponseBody struct { Shards_ ShardStatistics `json:"_shards"` // Task task contains a task id returned when wait_for_completion=false, diff --git a/typedapi/types/foreachprocessor.go b/typedapi/types/foreachprocessor.go index edd991fa9e..667598f303 100644 --- a/typedapi/types/foreachprocessor.go +++ b/typedapi/types/foreachprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ForeachProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L656-L670 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L656-L670 type ForeachProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -99,7 +99,7 @@ func (s *ForeachProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *ForeachProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/formattablemetricaggregation.go b/typedapi/types/formattablemetricaggregation.go index 52cc415b1b..9172798af3 100644 --- a/typedapi/types/formattablemetricaggregation.go +++ b/typedapi/types/formattablemetricaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FormattableMetricAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L51-L53 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L51-L53 type FormattableMetricAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` diff --git a/typedapi/types/foundstatus.go b/typedapi/types/foundstatus.go index 1605fbb5e7..eb7c50bcbd 100644 --- a/typedapi/types/foundstatus.go +++ b/typedapi/types/foundstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FoundStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/delete_privileges/types.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/delete_privileges/types.ts#L20-L22 type FoundStatus struct { Found bool `json:"found"` } @@ -52,7 +52,7 @@ func (s *FoundStatus) UnmarshalJSON(data []byte) error { switch t { case "found": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/frequencyencodingpreprocessor.go b/typedapi/types/frequencyencodingpreprocessor.go index daf27fa572..3b50f762ab 100644 --- a/typedapi/types/frequencyencodingpreprocessor.go +++ b/typedapi/types/frequencyencodingpreprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FrequencyEncodingPreprocessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/types.ts#L38-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/types.ts#L38-L42 type FrequencyEncodingPreprocessor struct { FeatureName string `json:"feature_name"` Field string `json:"field"` diff --git a/typedapi/types/frequentitemsetsaggregate.go b/typedapi/types/frequentitemsetsaggregate.go index 745deca5f1..4f8def5049 100644 --- a/typedapi/types/frequentitemsetsaggregate.go +++ b/typedapi/types/frequentitemsetsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // FrequentItemSetsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L639-L640 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L643-L644 type FrequentItemSetsAggregate struct { Buckets BucketsFrequentItemSetsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/frequentitemsetsaggregation.go b/typedapi/types/frequentitemsetsaggregation.go index a994da996e..1662bdc7ea 100644 --- a/typedapi/types/frequentitemsetsaggregation.go +++ b/typedapi/types/frequentitemsetsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FrequentItemSetsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L1159-L1183 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L1167-L1191 type FrequentItemSetsAggregation struct { // Fields Fields to analyze. Fields []FrequentItemSetsField `json:"fields"` @@ -72,7 +72,7 @@ func (s *FrequentItemSetsAggregation) UnmarshalJSON(data []byte) error { case "minimum_set_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *FrequentItemSetsAggregation) UnmarshalJSON(data []byte) error { } case "minimum_support": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *FrequentItemSetsAggregation) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/frequentitemsetsbucket.go b/typedapi/types/frequentitemsetsbucket.go index 8258b0023a..3de0921099 100644 --- a/typedapi/types/frequentitemsetsbucket.go +++ b/typedapi/types/frequentitemsetsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // FrequentItemSetsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L642-L645 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L646-L649 type FrequentItemSetsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -56,7 +56,7 @@ func (s *FrequentItemSetsBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -79,7 +79,7 @@ func (s *FrequentItemSetsBucket) UnmarshalJSON(data []byte) error { } case "support": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -539,7 +539,7 @@ func (s *FrequentItemSetsBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -589,7 +589,7 @@ func (s *FrequentItemSetsBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -599,7 +599,7 @@ func (s *FrequentItemSetsBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -616,7 +616,7 @@ func (s *FrequentItemSetsBucket) UnmarshalJSON(data []byte) error { func (s FrequentItemSetsBucket) MarshalJSON() ([]byte, error) { type opt FrequentItemSetsBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/frequentitemsetsfield.go b/typedapi/types/frequentitemsetsfield.go index 751d8683c2..69bf779266 100644 --- a/typedapi/types/frequentitemsetsfield.go +++ b/typedapi/types/frequentitemsetsfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // FrequentItemSetsField type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L1145-L1157 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L1153-L1165 type FrequentItemSetsField struct { // Exclude Values to exclude. // Can be regular expression strings or arrays of strings of exact terms. diff --git a/typedapi/types/frozenindices.go b/typedapi/types/frozenindices.go index fe42f852fe..58b1cd39f5 100644 --- a/typedapi/types/frozenindices.go +++ b/typedapi/types/frozenindices.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FrozenIndices type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L360-L362 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L360-L362 type FrozenIndices struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -54,7 +54,7 @@ func (s *FrozenIndices) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *FrozenIndices) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *FrozenIndices) UnmarshalJSON(data []byte) error { } case "indices_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/functionscore.go b/typedapi/types/functionscore.go index c3dcdbc8a5..fb49f579ca 100644 --- a/typedapi/types/functionscore.go +++ b/typedapi/types/functionscore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FunctionScore type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L201-L241 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L210-L250 type FunctionScore struct { // Exp Function that scores a document with a exponential decay, depending on the // distance of a numeric field value of the document from an origin. @@ -110,7 +110,7 @@ func (s *FunctionScore) UnmarshalJSON(data []byte) error { } case "weight": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/functionscorequery.go b/typedapi/types/functionscorequery.go index 781d269469..09e40fc36f 100644 --- a/typedapi/types/functionscorequery.go +++ b/typedapi/types/functionscorequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // FunctionScoreQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L92-L118 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L93-L119 type FunctionScoreQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
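Beyond the spec-commit bump in the generated headers, the recurring mechanical change in these hunks is the swap of `interface{}` for its Go 1.18 alias `any` inside the tolerant number decoding emitted for fields such as `weight`, `boost`, `max_boost`, and `min_score`. A small self-contained sketch of that pattern, using a hypothetical `scoreEnvelope` type rather than the generated structs:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// scoreEnvelope is a hypothetical type mirroring the generated pattern:
// a numeric field that Elasticsearch may return either as a JSON number
// or as a quoted string.
type scoreEnvelope struct {
	Boost float32
}

func (s *scoreEnvelope) UnmarshalJSON(data []byte) error {
	var raw map[string]json.RawMessage
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	if msg, ok := raw["boost"]; ok {
		var tmp any // previously: var tmp interface{}
		if err := json.Unmarshal(msg, &tmp); err != nil {
			return err
		}
		switch v := tmp.(type) {
		case string:
			f, err := strconv.ParseFloat(v, 32)
			if err != nil {
				return fmt.Errorf("Boost | %w", err)
			}
			s.Boost = float32(f)
		case float64:
			s.Boost = float32(v)
		}
	}
	return nil
}

func main() {
	var a, b scoreEnvelope
	_ = json.Unmarshal([]byte(`{"boost": 2.5}`), &a)
	_ = json.Unmarshal([]byte(`{"boost": "2.5"}`), &b)
	fmt.Println(a.Boost, b.Boost) // 2.5 2.5
}
```

Behaviour is unchanged by the alias: the type switch still accepts either a JSON number or a quoted numeric string, which is why the generated decoders go through `any` rather than unmarshalling straight into the target numeric type.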
@@ -74,7 +74,7 @@ func (s *FunctionScoreQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *FunctionScoreQuery) UnmarshalJSON(data []byte) error { } case "max_boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -116,7 +116,7 @@ func (s *FunctionScoreQuery) UnmarshalJSON(data []byte) error { } case "min_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/fuzziness.go b/typedapi/types/fuzziness.go index 66c14565e0..aeaaadfaef 100644 --- a/typedapi/types/fuzziness.go +++ b/typedapi/types/fuzziness.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // int // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L133-L134 -type Fuzziness interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L133-L134 +type Fuzziness any diff --git a/typedapi/types/fuzzyquery.go b/typedapi/types/fuzzyquery.go index 9ab216f958..c8f39713cc 100644 --- a/typedapi/types/fuzzyquery.go +++ b/typedapi/types/fuzzyquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // FuzzyQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L43-L78 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L44-L79 type FuzzyQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -80,7 +80,7 @@ func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -102,7 +102,7 @@ func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { case "max_expansions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +118,7 @@ func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { case "prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *FuzzyQuery) UnmarshalJSON(data []byte) error { } case "transpositions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/garbagecollector.go b/typedapi/types/garbagecollector.go index d527301924..d0f3eb1d29 100644 --- a/typedapi/types/garbagecollector.go +++ b/typedapi/types/garbagecollector.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
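`Fuzziness` is one of the union aliases moved from `interface{}` to `any` here; per the generated comment it admits either a string such as "AUTO" or an integer edit distance. A short usage sketch under that reading (the `matchOptions` container is hypothetical, not part of the client):

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Fuzziness mirrors the generated alias: a union of string and int.
type Fuzziness any

// matchOptions is a hypothetical container showing how the union
// serializes depending on which member is supplied.
type matchOptions struct {
	Fuzziness Fuzziness `json:"fuzziness,omitempty"`
}

func main() {
	auto, _ := json.Marshal(matchOptions{Fuzziness: "AUTO"})
	fixed, _ := json.Marshal(matchOptions{Fuzziness: 2})
	fmt.Println(string(auto))  // {"fuzziness":"AUTO"}
	fmt.Println(string(fixed)) // {"fuzziness":2}
}
```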
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // GarbageCollector type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L923-L928 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L923-L928 type GarbageCollector struct { // Collectors Contains statistics about JVM garbage collectors for the node. Collectors map[string]GarbageCollectorTotal `json:"collectors,omitempty"` diff --git a/typedapi/types/garbagecollectortotal.go b/typedapi/types/garbagecollectortotal.go index 35571339fc..bf2504d5f1 100644 --- a/typedapi/types/garbagecollectortotal.go +++ b/typedapi/types/garbagecollectortotal.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GarbageCollectorTotal type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L930-L943 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L930-L943 type GarbageCollectorTotal struct { // CollectionCount Total number of JVM garbage collectors that collect objects. CollectionCount *int64 `json:"collection_count,omitempty"` @@ -57,7 +57,7 @@ func (s *GarbageCollectorTotal) UnmarshalJSON(data []byte) error { switch t { case "collection_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,7 +84,7 @@ func (s *GarbageCollectorTotal) UnmarshalJSON(data []byte) error { s.CollectionTime = &o case "collection_time_in_millis": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/gcsrepository.go b/typedapi/types/gcsrepository.go index 90372420c3..d8a51e0d1c 100644 --- a/typedapi/types/gcsrepository.go +++ b/typedapi/types/gcsrepository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // GcsRepository type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L45-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L45-L48 type GcsRepository struct { Settings GcsRepositorySettings `json:"settings"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/gcsrepositorysettings.go b/typedapi/types/gcsrepositorysettings.go index 6aab12eb97..2db1f5e3e1 100644 --- a/typedapi/types/gcsrepositorysettings.go +++ b/typedapi/types/gcsrepositorysettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GcsRepositorySettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L85-L91 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L85-L91 type GcsRepositorySettings struct { ApplicationName *string `json:"application_name,omitempty"` BasePath *string `json:"base_path,omitempty"` @@ -113,7 +113,7 @@ func (s *GcsRepositorySettings) UnmarshalJSON(data []byte) error { s.Client = &o case "compress": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -137,7 +137,7 @@ func (s *GcsRepositorySettings) UnmarshalJSON(data []byte) error { } case "readonly": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geoboundingboxquery.go b/typedapi/types/geoboundingboxquery.go index 4b352abe84..0c4f128ad8 100644 --- a/typedapi/types/geoboundingboxquery.go +++ b/typedapi/types/geoboundingboxquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // GeoBoundingBoxQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/geo.ts#L32-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/geo.ts#L32-L53 type GeoBoundingBoxQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -71,7 +71,7 @@ func (s *GeoBoundingBoxQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *GeoBoundingBoxQuery) UnmarshalJSON(data []byte) error { } case "ignore_unmapped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -144,7 +144,7 @@ func (s *GeoBoundingBoxQuery) UnmarshalJSON(data []byte) error { func (s GeoBoundingBoxQuery) MarshalJSON() ([]byte, error) { type opt GeoBoundingBoxQuery // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/geobounds.go b/typedapi/types/geobounds.go index 703e718f0e..efc744cd31 100644 --- a/typedapi/types/geobounds.go +++ b/typedapi/types/geobounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -27,5 +27,5 @@ package types // TopRightBottomLeftGeoBounds // WktGeoBounds // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L135-L148 -type GeoBounds interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L135-L148 +type GeoBounds any diff --git a/typedapi/types/geoboundsaggregate.go b/typedapi/types/geoboundsaggregate.go index 3883b59d84..fa0fa20509 100644 --- a/typedapi/types/geoboundsaggregate.go +++ b/typedapi/types/geoboundsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // GeoBoundsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L303-L306 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L303-L306 type GeoBoundsAggregate struct { Bounds GeoBounds `json:"bounds,omitempty"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/geoboundsaggregation.go b/typedapi/types/geoboundsaggregation.go index 4a4f7e85d2..c3d89c8bc2 100644 --- a/typedapi/types/geoboundsaggregation.go +++ b/typedapi/types/geoboundsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GeoBoundsAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L108-L114 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L108-L114 type GeoBoundsAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -106,7 +106,7 @@ func (s *GeoBoundsAggregation) UnmarshalJSON(data []byte) error { } case "wrap_longitude": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geocentroidaggregate.go b/typedapi/types/geocentroidaggregate.go index ec1cdb3abf..4268891e13 100644 --- a/typedapi/types/geocentroidaggregate.go +++ b/typedapi/types/geocentroidaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GeoCentroidAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L308-L312 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L308-L312 type GeoCentroidAggregate struct { Count int64 `json:"count"` Location GeoLocation `json:"location,omitempty"` @@ -54,7 +54,7 @@ func (s *GeoCentroidAggregate) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geocentroidaggregation.go b/typedapi/types/geocentroidaggregation.go index bd0afe5bc5..07dae8ee7e 100644 --- a/typedapi/types/geocentroidaggregation.go +++ b/typedapi/types/geocentroidaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GeoCentroidAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L116-L119 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L116-L119 type GeoCentroidAggregation struct { Count *int64 `json:"count,omitempty"` // Field The field on which to run the aggregation. @@ -59,7 +59,7 @@ func (s *GeoCentroidAggregation) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geodecayfunction.go b/typedapi/types/geodecayfunction.go index 2cb20447d1..a8e99c3140 100644 --- a/typedapi/types/geodecayfunction.go +++ b/typedapi/types/geodecayfunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -29,9 +29,9 @@ import ( // GeoDecayFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L190-L192 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L194-L197 type GeoDecayFunction struct { - GeoDecayFunction map[string]DecayPlacementGeoLocationDistance `json:"-"` + DecayFunctionBaseGeoLocationDistance map[string]DecayPlacementGeoLocationDistance `json:"-"` // MultiValueMode Determines how the distance is calculated when a field used for computing the // decay contains multiple values. MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` @@ -41,7 +41,7 @@ type GeoDecayFunction struct { func (s GeoDecayFunction) MarshalJSON() ([]byte, error) { type opt GeoDecayFunction // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { @@ -53,10 +53,10 @@ func (s GeoDecayFunction) MarshalJSON() ([]byte, error) { } // We inline the additional fields from the underlying map - for key, value := range s.GeoDecayFunction { + for key, value := range s.DecayFunctionBaseGeoLocationDistance { tmp[fmt.Sprintf("%s", key)] = value } - delete(tmp, "GeoDecayFunction") + delete(tmp, "DecayFunctionBaseGeoLocationDistance") data, err = json.Marshal(tmp) if err != nil { @@ -69,7 +69,7 @@ func (s GeoDecayFunction) MarshalJSON() ([]byte, error) { // NewGeoDecayFunction returns a GeoDecayFunction. func NewGeoDecayFunction() *GeoDecayFunction { r := &GeoDecayFunction{ - GeoDecayFunction: make(map[string]DecayPlacementGeoLocationDistance, 0), + DecayFunctionBaseGeoLocationDistance: make(map[string]DecayPlacementGeoLocationDistance, 0), } return r diff --git a/typedapi/types/geodistanceaggregate.go b/typedapi/types/geodistanceaggregate.go index 642f3e5d79..0eaee3626c 100644 --- a/typedapi/types/geodistanceaggregate.go +++ b/typedapi/types/geodistanceaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // GeoDistanceAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L550-L554 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L554-L558 type GeoDistanceAggregate struct { Buckets BucketsRangeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/geodistanceaggregation.go b/typedapi/types/geodistanceaggregation.go index b2722462ea..dbb458f5ab 100644 --- a/typedapi/types/geodistanceaggregation.go +++ b/typedapi/types/geodistanceaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
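In the geodecayfunction.go hunk above, the embedded additional-properties map is renamed from GeoDecayFunction to DecayFunctionBaseGeoLocationDistance, so callers that populated the old field by name need the new spelling; the value type and the inlining behaviour of MarshalJSON are unchanged. A minimal sketch of the new field name, assuming the function is still built with types.NewGeoDecayFunction and leaving the placement zero-valued for brevity:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	fn := types.NewGeoDecayFunction()
	// Previously: fn.GeoDecayFunction["location"] = ...
	// A real query would set origin/scale on the placement; it is left empty here.
	fn.DecayFunctionBaseGeoLocationDistance["location"] = types.DecayPlacementGeoLocationDistance{}

	out, err := json.Marshal(fn)
	if err != nil {
		panic(err)
	}
	// The placement is inlined under its map key rather than under the renamed
	// Go struct member.
	fmt.Println(string(out))
}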
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "strconv" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/distanceunit" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/geodistancetype" @@ -34,14 +33,12 @@ import ( // GeoDistanceAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L380-L403 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L382-L405 type GeoDistanceAggregation struct { // DistanceType The distance calculation type. DistanceType *geodistancetype.GeoDistanceType `json:"distance_type,omitempty"` // Field A field of type `geo_point` used to evaluate the distance. - Field *string `json:"field,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Field *string `json:"field,omitempty"` // Origin The origin used to evaluate the distance. Origin GeoLocation `json:"origin,omitempty"` // Ranges An array of ranges used to bucket documents. @@ -75,23 +72,6 @@ func (s *GeoDistanceAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Field", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "origin": if err := dec.Decode(&s.Origin); err != nil { return fmt.Errorf("%s | %w", "Origin", err) diff --git a/typedapi/types/geodistancefeaturequery.go b/typedapi/types/geodistancefeaturequery.go index ee30efbb33..23fb54f3aa 100644 --- a/typedapi/types/geodistancefeaturequery.go +++ b/typedapi/types/geodistancefeaturequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GeoDistanceFeatureQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L62-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L67-L70 type GeoDistanceFeatureQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -78,7 +78,7 @@ func (s *GeoDistanceFeatureQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geodistancequery.go b/typedapi/types/geodistancequery.go index a8a4bf8731..0af88544d5 100644 --- a/typedapi/types/geodistancequery.go +++ b/typedapi/types/geodistancequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
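The geodistanceaggregation.go hunk above drops the Meta and Name members (and the now-unused strconv import) from the aggregation body itself. A short sketch of how request-side metadata can still be attached, assuming the enclosing types.Aggregations container keeps its Meta field and its GeoDistance entry as in the current generated types; ranges are omitted for brevity:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/some"
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	agg := types.Aggregations{
		// Meta now lives on the container, not on GeoDistanceAggregation.
		Meta: types.Metadata{"note": json.RawMessage(`"rings around the office"`)},
		GeoDistance: &types.GeoDistanceAggregation{
			Field:  some.String("location"),
			Origin: "52.376, 4.894", // GeoLocation is an `any`; a "lat, lon" string is accepted
		},
	}

	out, err := json.Marshal(agg)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(out))
}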
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // GeoDistanceQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/geo.ts#L57-L85 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/geo.ts#L60-L91 type GeoDistanceQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -77,7 +77,7 @@ func (s *GeoDistanceQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,7 +103,7 @@ func (s *GeoDistanceQuery) UnmarshalJSON(data []byte) error { } case "ignore_unmapped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -155,7 +155,7 @@ func (s *GeoDistanceQuery) UnmarshalJSON(data []byte) error { func (s GeoDistanceQuery) MarshalJSON() ([]byte, error) { type opt GeoDistanceQuery // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/geodistancesort.go b/typedapi/types/geodistancesort.go index 9b47c45ab5..dae8966586 100644 --- a/typedapi/types/geodistancesort.go +++ b/typedapi/types/geodistancesort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -36,7 +36,7 @@ import ( // GeoDistanceSort type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/sort.ts#L58-L66 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/sort.ts#L59-L70 type GeoDistanceSort struct { DistanceType *geodistancetype.GeoDistanceType `json:"distance_type,omitempty"` GeoDistanceSort map[string][]GeoLocation `json:"-"` @@ -67,7 +67,7 @@ func (s *GeoDistanceSort) UnmarshalJSON(data []byte) error { } case "ignore_unmapped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -127,7 +127,7 @@ func (s *GeoDistanceSort) UnmarshalJSON(data []byte) error { func (s GeoDistanceSort) MarshalJSON() ([]byte, error) { type opt GeoDistanceSort // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/geohashgridaggregate.go b/typedapi/types/geohashgridaggregate.go index c08c672b8e..ae4e6f6292 100644 --- a/typedapi/types/geohashgridaggregate.go +++ b/typedapi/types/geohashgridaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // GeoHashGridAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L506-L508 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L510-L512 type GeoHashGridAggregate struct { Buckets BucketsGeoHashGridBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/geohashgridaggregation.go b/typedapi/types/geohashgridaggregation.go index 93194dbb4e..12d4103b45 100644 --- a/typedapi/types/geohashgridaggregation.go +++ b/typedapi/types/geohashgridaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,15 +31,13 @@ import ( // GeoHashGridAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L405-L430 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L407-L432 type GeoHashGridAggregation struct { // Bounds The bounding box to filter the points in each bucket. Bounds GeoBounds `json:"bounds,omitempty"` // Field Field containing indexed `geo_point` or `geo_shape` values. // If the field contains an array, `geohash_grid` aggregates all array values. - Field *string `json:"field,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Field *string `json:"field,omitempty"` // Precision The string length of the geohashes used to define cells/buckets in the // results. Precision GeoHashPrecision `json:"precision,omitempty"` @@ -77,23 +75,6 @@ func (s *GeoHashGridAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Field", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "precision": if err := dec.Decode(&s.Precision); err != nil { return fmt.Errorf("%s | %w", "Precision", err) @@ -101,7 +82,7 @@ func (s *GeoHashGridAggregation) UnmarshalJSON(data []byte) error { case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +98,7 @@ func (s *GeoHashGridAggregation) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geohashgridbucket.go b/typedapi/types/geohashgridbucket.go index 6b4c935c32..6c2ddfa0d3 100644 --- a/typedapi/types/geohashgridbucket.go +++ b/typedapi/types/geohashgridbucket.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // GeoHashGridBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L510-L512 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L514-L516 type GeoHashGridBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *GeoHashGridBucket) UnmarshalJSON(data []byte) error { func (s GeoHashGridBucket) MarshalJSON() ([]byte, error) { type opt GeoHashGridBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/geohashlocation.go b/typedapi/types/geohashlocation.go index 9a64fa4626..4619218e25 100644 --- a/typedapi/types/geohashlocation.go +++ b/typedapi/types/geohashlocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // GeoHashLocation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L131-L133 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L131-L133 type GeoHashLocation struct { Geohash string `json:"geohash"` } diff --git a/typedapi/types/geohashprecision.go b/typedapi/types/geohashprecision.go index a14c6155e5..8d70b237c8 100644 --- a/typedapi/types/geohashprecision.go +++ b/typedapi/types/geohashprecision.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
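The geohashgridbucket.go hunk above changes the sub-aggregation dispatch key from "box_plot" to "boxplot", matching the typed-keys prefix ("boxplot#<name>") that Elasticsearch returns. A sketch of decoding such a bucket, assuming a typed-keys response shape as in the made-up payload below:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A trimmed geohash_grid bucket from a typed-keys response; the sub-aggregation
	// key carries the "boxplot#" prefix that the regenerated decoder now matches.
	raw := []byte(`{
		"doc_count": 3,
		"boxplot#load_stats": {"min": 1, "q1": 2, "q2": 5, "q3": 8, "max": 9, "lower": 1, "upper": 9}
	}`)

	var bucket types.GeoHashGridBucket
	if err := json.Unmarshal(raw, &bucket); err != nil {
		panic(err)
	}
	if bp, ok := bucket.Aggregations["load_stats"].(*types.BoxPlotAggregate); ok {
		fmt.Println(bp.Q2) // 5
	}
}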
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // int // string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L86-L90 -type GeoHashPrecision interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L86-L90 +type GeoHashPrecision any diff --git a/typedapi/types/geohexgridaggregate.go b/typedapi/types/geohexgridaggregate.go index 08e84de430..86291e8681 100644 --- a/typedapi/types/geohexgridaggregate.go +++ b/typedapi/types/geohexgridaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // GeoHexGridAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L522-L523 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L526-L527 type GeoHexGridAggregate struct { Buckets BucketsGeoHexGridBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/geohexgridaggregation.go b/typedapi/types/geohexgridaggregation.go index 67533b9bfe..987ea9364c 100644 --- a/typedapi/types/geohexgridaggregation.go +++ b/typedapi/types/geohexgridaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,15 +31,13 @@ import ( // GeohexGridAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L460-L485 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L462-L487 type GeohexGridAggregation struct { // Bounds Bounding box used to filter the geo-points in each bucket. Bounds GeoBounds `json:"bounds,omitempty"` // Field Field containing indexed `geo_point` or `geo_shape` values. // If the field contains an array, `geohex_grid` aggregates all array values. - Field string `json:"field"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Field string `json:"field"` // Precision Integer zoom of the key used to defined cells or buckets // in the results. Value should be between 0-15. 
Precision *int `json:"precision,omitempty"` @@ -74,26 +72,9 @@ func (s *GeohexGridAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Field", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "precision": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -109,7 +90,7 @@ func (s *GeohexGridAggregation) UnmarshalJSON(data []byte) error { case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +106,7 @@ func (s *GeohexGridAggregation) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geohexgridbucket.go b/typedapi/types/geohexgridbucket.go index bcc58157ae..c8b7c68214 100644 --- a/typedapi/types/geohexgridbucket.go +++ b/typedapi/types/geohexgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // GeoHexGridBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L525-L527 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L529-L531 type GeoHexGridBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *GeoHexGridBucket) UnmarshalJSON(data []byte) error { func (s GeoHexGridBucket) MarshalJSON() ([]byte, error) { type opt GeoHexGridBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/geoipdownloadstatistics.go 
b/typedapi/types/geoipdownloadstatistics.go index 1c46e25c20..8933ac4632 100644 --- a/typedapi/types/geoipdownloadstatistics.go +++ b/typedapi/types/geoipdownloadstatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GeoIpDownloadStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/geo_ip_stats/types.ts#L24-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/geo_ip_stats/types.ts#L24-L35 type GeoIpDownloadStatistics struct { // DatabaseCount Current number of databases available for use. DatabaseCount int `json:"database_count"` @@ -62,7 +62,7 @@ func (s *GeoIpDownloadStatistics) UnmarshalJSON(data []byte) error { case "database_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -78,7 +78,7 @@ func (s *GeoIpDownloadStatistics) UnmarshalJSON(data []byte) error { case "failed_downloads": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -94,7 +94,7 @@ func (s *GeoIpDownloadStatistics) UnmarshalJSON(data []byte) error { case "skipped_updates": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +110,7 @@ func (s *GeoIpDownloadStatistics) UnmarshalJSON(data []byte) error { case "successful_downloads": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geoipnodedatabasename.go b/typedapi/types/geoipnodedatabasename.go index b7e754bf0e..333ce30e0b 100644 --- a/typedapi/types/geoipnodedatabasename.go +++ b/typedapi/types/geoipnodedatabasename.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // GeoIpNodeDatabaseName type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/geo_ip_stats/types.ts#L45-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/geo_ip_stats/types.ts#L45-L48 type GeoIpNodeDatabaseName struct { // Name Name of the database. Name string `json:"name"` diff --git a/typedapi/types/geoipnodedatabases.go b/typedapi/types/geoipnodedatabases.go index 8d35d4cd10..8c764c274b 100644 --- a/typedapi/types/geoipnodedatabases.go +++ b/typedapi/types/geoipnodedatabases.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // GeoIpNodeDatabases type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/geo_ip_stats/types.ts#L37-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/geo_ip_stats/types.ts#L37-L43 type GeoIpNodeDatabases struct { // Databases Downloaded databases for the node. Databases []GeoIpNodeDatabaseName `json:"databases"` diff --git a/typedapi/types/geoipprocessor.go b/typedapi/types/geoipprocessor.go index f8a3d034d2..804df2ca8d 100644 --- a/typedapi/types/geoipprocessor.go +++ b/typedapi/types/geoipprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GeoIpProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L339-L368 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L339-L368 type GeoIpProcessor struct { // DatabaseFile The database filename referring to a database the module ships with // (GeoLite2-City.mmdb, GeoLite2-Country.mmdb, or GeoLite2-ASN.mmdb) or a custom @@ -110,7 +110,7 @@ func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { } case "first_only": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -136,7 +136,7 @@ func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *GeoIpProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geoline.go b/typedapi/types/geoline.go index a27a6a4cce..0da6f2dc2b 100644 --- a/typedapi/types/geoline.go +++ b/typedapi/types/geoline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GeoLine type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L56-L62 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L56-L62 type GeoLine struct { // Coordinates Array of `[lon, lat]` coordinates Coordinates [][]Float64 `json:"coordinates"` diff --git a/typedapi/types/geolineaggregate.go b/typedapi/types/geolineaggregate.go index e64a5fb2df..ea5a29fe81 100644 --- a/typedapi/types/geolineaggregate.go +++ b/typedapi/types/geolineaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GeoLineAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L784-L791 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L791-L798 type GeoLineAggregate struct { Geometry GeoLine `json:"geometry"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/geolineaggregation.go b/typedapi/types/geolineaggregation.go index bd04f240c4..9d46a8b7da 100644 --- a/typedapi/types/geolineaggregation.go +++ b/typedapi/types/geolineaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // GeoLineAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L121-L146 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L121-L146 type GeoLineAggregation struct { // IncludeSort When `true`, returns an additional array of the sort values in the feature // properties. @@ -68,7 +68,7 @@ func (s *GeoLineAggregation) UnmarshalJSON(data []byte) error { switch t { case "include_sort": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *GeoLineAggregation) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geolinepoint.go b/typedapi/types/geolinepoint.go index 3a2e7820bc..47369cb65f 100644 --- a/typedapi/types/geolinepoint.go +++ b/typedapi/types/geolinepoint.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // GeoLinePoint type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L155-L160 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L155-L160 type GeoLinePoint struct { // Field The name of the geo_point field. Field string `json:"field"` diff --git a/typedapi/types/geolinesort.go b/typedapi/types/geolinesort.go index 758c0d3f25..13f0bbd476 100644 --- a/typedapi/types/geolinesort.go +++ b/typedapi/types/geolinesort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // GeoLineSort type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L148-L153 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L148-L153 type GeoLineSort struct { // Field The name of the numeric field to use as the sort key for ordering the points. Field string `json:"field"` diff --git a/typedapi/types/geolocation.go b/typedapi/types/geolocation.go index ded39bec2e..f93b1f8fbd 100644 --- a/typedapi/types/geolocation.go +++ b/typedapi/types/geolocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -27,5 +27,5 @@ package types // []Float64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L104-L118 -type GeoLocation interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L104-L118 +type GeoLocation any diff --git a/typedapi/types/geopointproperty.go b/typedapi/types/geopointproperty.go index 5334539d37..7e7fedcdfd 100644 --- a/typedapi/types/geopointproperty.go +++ b/typedapi/types/geopointproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // GeoPointProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/geo.ts#L24-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/geo.ts#L24-L32 type GeoPointProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -87,7 +87,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -112,7 +112,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -133,7 +133,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -223,12 +223,6 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -247,6 +241,18 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -403,6 +409,12 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -414,7 +426,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -429,7 +441,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -443,7 +455,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { } case "ignore_z_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -457,7 +469,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -495,7 +507,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -516,7 +528,7 @@ func (s *GeoPointProperty) 
UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -606,12 +618,6 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -630,6 +636,18 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -786,6 +804,12 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -844,7 +868,7 @@ func (s *GeoPointProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geopolygonpoints.go b/typedapi/types/geopolygonpoints.go index 5539af190b..c73ea04bc1 100644 --- a/typedapi/types/geopolygonpoints.go +++ b/typedapi/types/geopolygonpoints.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // GeoPolygonPoints type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/geo.ts#L87-L89 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/geo.ts#L93-L95 type GeoPolygonPoints struct { Points []GeoLocation `json:"points"` } diff --git a/typedapi/types/geopolygonquery.go b/typedapi/types/geopolygonquery.go index 428c7ccf9e..8d05f6a9f2 100644 --- a/typedapi/types/geopolygonquery.go +++ b/typedapi/types/geopolygonquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // GeoPolygonQuery type. 
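The geopointproperty.go hunks above (and the matching geoshapeproperty.go ones further down) teach the sub-field and sub-property dispatch about the semantic_text and icu_collation_keyword mapping types, reorder the existing sparse_vector case alongside them, and fix the dynamic-mapping sentinel from "{dynamic_property}" to "{dynamic_type}". A sketch of the effect when decoding a mapping, assuming a semantic_text sub-property as in the made-up payload below; only the concrete type of the decoded property is inspected:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A geo_point property with a semantic_text sub-property; the regenerated
	// decoder resolves it to *types.SemanticTextProperty instead of the generic fallback.
	raw := []byte(`{
		"type": "geo_point",
		"properties": {
			"description": {"type": "semantic_text", "inference_id": "my-elser"}
		}
	}`)

	var prop types.GeoPointProperty
	if err := json.Unmarshal(raw, &prop); err != nil {
		panic(err)
	}
	fmt.Printf("%T\n", prop.Properties["description"]) // *types.SemanticTextProperty
}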
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/geo.ts#L91-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/geo.ts#L97-L108 type GeoPolygonQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -63,7 +63,7 @@ func (s *GeoPolygonQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -79,7 +79,7 @@ func (s *GeoPolygonQuery) UnmarshalJSON(data []byte) error { } case "ignore_unmapped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -131,7 +131,7 @@ func (s *GeoPolygonQuery) UnmarshalJSON(data []byte) error { func (s GeoPolygonQuery) MarshalJSON() ([]byte, error) { type opt GeoPolygonQuery // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/georesults.go b/typedapi/types/georesults.go index 87830d487a..bbca11ed0a 100644 --- a/typedapi/types/georesults.go +++ b/typedapi/types/georesults.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GeoResults type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Anomaly.ts#L145-L154 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Anomaly.ts#L145-L154 type GeoResults struct { // ActualPoint The actual value for the bucket formatted as a `geo_point`. ActualPoint string `json:"actual_point"` diff --git a/typedapi/types/geoshapefieldquery.go b/typedapi/types/geoshapefieldquery.go index 8240032ae7..5719a355fa 100644 --- a/typedapi/types/geoshapefieldquery.go +++ b/typedapi/types/geoshapefieldquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // GeoShapeFieldQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/geo.ts#L106-L117 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/geo.ts#L115-L126 type GeoShapeFieldQuery struct { // IndexedShape Query using an indexed shape retrieved from the the specified document and // path. diff --git a/typedapi/types/geoshapeproperty.go b/typedapi/types/geoshapeproperty.go index 1788684db1..547e692d86 100644 --- a/typedapi/types/geoshapeproperty.go +++ b/typedapi/types/geoshapeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // GeoShapeProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/geo.ts#L41-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/geo.ts#L41-L54 type GeoShapeProperty struct { Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -71,7 +71,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { switch t { case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -101,7 +101,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -126,7 +126,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -147,7 +147,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -237,12 +237,6 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -261,6 +255,18 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -417,6 +423,12 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -428,7 +440,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -443,7 +455,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -457,7 +469,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { } case "ignore_z_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -490,7 +502,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { refs := 
make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -511,7 +523,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -601,12 +613,6 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -625,6 +631,18 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -781,6 +799,12 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -803,7 +827,7 @@ func (s *GeoShapeProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geoshapequery.go b/typedapi/types/geoshapequery.go index 6fa9dcc092..4a5db83c86 100644 --- a/typedapi/types/geoshapequery.go +++ b/typedapi/types/geoshapequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GeoShapeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/geo.ts#L121-L131 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/geo.ts#L128-L143 type GeoShapeQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -63,7 +63,7 @@ func (s *GeoShapeQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -79,7 +79,7 @@ func (s *GeoShapeQuery) UnmarshalJSON(data []byte) error { } case "ignore_unmapped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -126,7 +126,7 @@ func (s *GeoShapeQuery) UnmarshalJSON(data []byte) error { func (s GeoShapeQuery) MarshalJSON() ([]byte, error) { type opt GeoShapeQuery // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/geotilegridaggregate.go b/typedapi/types/geotilegridaggregate.go index c799116597..b2622343c3 100644 --- a/typedapi/types/geotilegridaggregate.go +++ b/typedapi/types/geotilegridaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // GeoTileGridAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L514-L516 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L518-L520 type GeoTileGridAggregate struct { Buckets BucketsGeoTileGridBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/geotilegridaggregation.go b/typedapi/types/geotilegridaggregation.go index e5fbcd9671..078718c2f7 100644 --- a/typedapi/types/geotilegridaggregation.go +++ b/typedapi/types/geotilegridaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,15 +31,13 @@ import ( // GeoTileGridAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L432-L458 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L434-L460 type GeoTileGridAggregation struct { // Bounds A bounding box to filter the geo-points or geo-shapes in each bucket. Bounds GeoBounds `json:"bounds,omitempty"` // Field Field containing indexed `geo_point` or `geo_shape` values. // If the field contains an array, `geotile_grid` aggregates all array values. - Field *string `json:"field,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Field *string `json:"field,omitempty"` // Precision Integer zoom of the key used to define cells/buckets in the results. // Values outside of the range [0,29] will be rejected. 
Precision *int `json:"precision,omitempty"` @@ -77,23 +75,6 @@ func (s *GeoTileGridAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Field", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "precision": if err := dec.Decode(&s.Precision); err != nil { return fmt.Errorf("%s | %w", "Precision", err) @@ -101,7 +82,7 @@ func (s *GeoTileGridAggregation) UnmarshalJSON(data []byte) error { case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +98,7 @@ func (s *GeoTileGridAggregation) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/geotilegridbucket.go b/typedapi/types/geotilegridbucket.go index 98cd597597..a1986a333a 100644 --- a/typedapi/types/geotilegridbucket.go +++ b/typedapi/types/geotilegridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // GeoTileGridBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L518-L520 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L522-L524 type GeoTileGridBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *GeoTileGridBucket) UnmarshalJSON(data []byte) error { func (s GeoTileGridBucket) MarshalJSON() ([]byte, error) { type opt GeoTileGridBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git 
a/typedapi/types/getmigrationfeature.go b/typedapi/types/getmigrationfeature.go index fb598ff656..81a66d3c28 100644 --- a/typedapi/types/getmigrationfeature.go +++ b/typedapi/types/getmigrationfeature.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // GetMigrationFeature type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L37-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L37-L42 type GetMigrationFeature struct { FeatureName string `json:"feature_name"` Indices []MigrationFeatureIndexInfo `json:"indices"` diff --git a/typedapi/types/getresult.go b/typedapi/types/getresult.go index 09671f6768..81b90e6b1e 100644 --- a/typedapi/types/getresult.go +++ b/typedapi/types/getresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GetResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/get/types.ts#L25-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/get/types.ts#L25-L35 type GetResult struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` @@ -68,7 +68,7 @@ func (s *GetResult) UnmarshalJSON(data []byte) error { } case "found": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *GetResult) UnmarshalJSON(data []byte) error { } case "_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/getscriptcontext.go b/typedapi/types/getscriptcontext.go index 6f6d8cd400..deff3424c5 100644 --- a/typedapi/types/getscriptcontext.go +++ b/typedapi/types/getscriptcontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // GetScriptContext type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/get_script_context/types.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/get_script_context/types.ts#L22-L25 type GetScriptContext struct { Methods []ContextMethod `json:"methods"` Name string `json:"name"` diff --git a/typedapi/types/getstats.go b/typedapi/types/getstats.go index c3cd5b7fce..a01e8358ee 100644 --- a/typedapi/types/getstats.go +++ b/typedapi/types/getstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GetStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L130-L141 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L130-L141 type GetStats struct { Current int64 `json:"current"` ExistsTime Duration `json:"exists_time,omitempty"` @@ -61,7 +61,7 @@ func (s *GetStats) UnmarshalJSON(data []byte) error { switch t { case "current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -86,7 +86,7 @@ func (s *GetStats) UnmarshalJSON(data []byte) error { } case "exists_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,7 +111,7 @@ func (s *GetStats) UnmarshalJSON(data []byte) error { } case "missing_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -136,7 +136,7 @@ func (s *GetStats) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/getuserprofileerrors.go b/typedapi/types/getuserprofileerrors.go index 641e015fdf..302e1f1d37 100644 --- a/typedapi/types/getuserprofileerrors.go +++ b/typedapi/types/getuserprofileerrors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GetUserProfileErrors type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_user_profile/types.ts#L25-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_user_profile/types.ts#L25-L28 type GetUserProfileErrors struct { Count int64 `json:"count"` Details map[string]ErrorCause `json:"details"` @@ -53,7 +53,7 @@ func (s *GetUserProfileErrors) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/globalaggregate.go b/typedapi/types/globalaggregate.go index f7ac649159..ad01cf3e75 100644 --- a/typedapi/types/globalaggregate.go +++ b/typedapi/types/globalaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // GlobalAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L492-L493 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L496-L497 type GlobalAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *GlobalAggregate) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *GlobalAggregate) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *GlobalAggregate) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *GlobalAggregate) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *GlobalAggregate) UnmarshalJSON(data []byte) error { func (s GlobalAggregate) MarshalJSON() ([]byte, error) { type opt GlobalAggregate // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/globalaggregation.go b/typedapi/types/globalaggregation.go index 119cec53a9..8c0dde1728 100644 --- a/typedapi/types/globalaggregation.go +++ b/typedapi/types/globalaggregation.go @@ -16,62 +16,14 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "io" - "strconv" -) - // GlobalAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L487-L487 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L489-L489 type GlobalAggregation struct { - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` -} - -func (s *GlobalAggregation) UnmarshalJSON(data []byte) error { - - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - - } - } - return nil } // NewGlobalAggregation returns a GlobalAggregation. diff --git a/typedapi/types/globalprivilege.go b/typedapi/types/globalprivilege.go index d346b4e773..5ca0fa57d7 100644 --- a/typedapi/types/globalprivilege.go +++ b/typedapi/types/globalprivilege.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // GlobalPrivilege type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L189-L191 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L329-L331 type GlobalPrivilege struct { Application ApplicationGlobalUserPrivileges `json:"application"` } diff --git a/typedapi/types/googlenormalizeddistanceheuristic.go b/typedapi/types/googlenormalizeddistanceheuristic.go index b506cbebaf..b860cf8c03 100644 --- a/typedapi/types/googlenormalizeddistanceheuristic.go +++ b/typedapi/types/googlenormalizeddistanceheuristic.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GoogleNormalizedDistanceHeuristic type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L746-L751 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L748-L753 type GoogleNormalizedDistanceHeuristic struct { // BackgroundIsSuperset Set to `false` if you defined a custom background filter that represents a // different set of documents that you want to compare to. @@ -54,7 +54,7 @@ func (s *GoogleNormalizedDistanceHeuristic) UnmarshalJSON(data []byte) error { switch t { case "background_is_superset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/grantapikey.go b/typedapi/types/grantapikey.go index 7b4d26579a..9a7e23bcc5 100644 --- a/typedapi/types/grantapikey.go +++ b/typedapi/types/grantapikey.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // GrantApiKey type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/grant_api_key/types.ts#L25-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/grant_api_key/types.ts#L25-L46 type GrantApiKey struct { // Expiration Expiration time for the API key. By default, API keys never expire. Expiration *string `json:"expiration,omitempty"` diff --git a/typedapi/types/grokprocessor.go b/typedapi/types/grokprocessor.go index 4c527429da..c45da8d030 100644 --- a/typedapi/types/grokprocessor.go +++ b/typedapi/types/grokprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GrokProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L672-L697 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L672-L697 type GrokProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -108,7 +108,7 @@ func (s *GrokProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -122,7 +122,7 @@ func (s *GrokProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -166,7 +166,7 @@ func (s *GrokProcessor) UnmarshalJSON(data []byte) error { s.Tag = &o case "trace_match": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/groupings.go b/typedapi/types/groupings.go index a384244c59..dc26d38c02 100644 --- a/typedapi/types/groupings.go +++ b/typedapi/types/groupings.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Groupings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/_types/Groupings.ts#L24-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/_types/Groupings.ts#L24-L40 type Groupings struct { // DateHistogram A date histogram group aggregates a date field into time-based buckets. // This group is mandatory; you currently cannot roll up documents without a diff --git a/typedapi/types/gsubprocessor.go b/typedapi/types/gsubprocessor.go index 81ab7ad090..17733ad08b 100644 --- a/typedapi/types/gsubprocessor.go +++ b/typedapi/types/gsubprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // GsubProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L699-L723 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L699-L723 type GsubProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -104,7 +104,7 @@ func (s *GsubProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +118,7 @@ func (s *GsubProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/halffloatnumberproperty.go b/typedapi/types/halffloatnumberproperty.go index c5ce495ff0..0b1f44de4d 100644 --- a/typedapi/types/halffloatnumberproperty.go +++ b/typedapi/types/halffloatnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // HalfFloatNumberProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L139-L142 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L147-L150 type HalfFloatNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -79,7 +79,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -171,7 +171,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -261,12 +261,6 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -285,6 +279,18 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -441,6 +447,12 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -452,7 +464,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -467,7 +479,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -481,7 +493,7 @@ func (s 
*HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -503,7 +515,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "null_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -530,7 +542,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -551,7 +563,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -641,12 +653,6 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -665,6 +671,18 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -821,6 +839,12 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -879,7 +903,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -893,7 +917,7 @@ func (s *HalfFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "time_series_dimension": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/haschildquery.go b/typedapi/types/haschildquery.go index 9a1118fe07..78691bb2b9 100644 --- a/typedapi/types/haschildquery.go +++ b/typedapi/types/haschildquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // HasChildQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/joining.ts#L41-L76 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/joining.ts#L41-L76 type HasChildQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -84,7 +84,7 @@ func (s *HasChildQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *HasChildQuery) UnmarshalJSON(data []byte) error { } case "ignore_unmapped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -120,7 +120,7 @@ func (s *HasChildQuery) UnmarshalJSON(data []byte) error { case "max_children": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -136,7 +136,7 @@ func (s *HasChildQuery) UnmarshalJSON(data []byte) error { case "min_children": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/hasparentquery.go b/typedapi/types/hasparentquery.go index 5c07438401..47bdcda82b 100644 --- a/typedapi/types/hasparentquery.go +++ b/typedapi/types/hasparentquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // HasParentQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/joining.ts#L78-L104 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/joining.ts#L78-L104 type HasParentQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -74,7 +74,7 @@ func (s *HasParentQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *HasParentQuery) UnmarshalJSON(data []byte) error { } case "ignore_unmapped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -131,7 +131,7 @@ func (s *HasParentQuery) UnmarshalJSON(data []byte) error { s.QueryName_ = &o case "score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/hasprivilegesuserprofileerrors.go b/typedapi/types/hasprivilegesuserprofileerrors.go index 759a2db941..cf457eb626 100644 --- a/typedapi/types/hasprivilegesuserprofileerrors.go +++ b/typedapi/types/hasprivilegesuserprofileerrors.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // HasPrivilegesUserProfileErrors type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/has_privileges_user_profile/types.ts#L39-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/has_privileges_user_profile/types.ts#L39-L42 type HasPrivilegesUserProfileErrors struct { Count int64 `json:"count"` Details map[string]ErrorCause `json:"details"` @@ -53,7 +53,7 @@ func (s *HasPrivilegesUserProfileErrors) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/hdrmethod.go b/typedapi/types/hdrmethod.go index 857556d06d..f9bdb3b571 100644 --- a/typedapi/types/hdrmethod.go +++ b/typedapi/types/hdrmethod.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // HdrMethod type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L216-L221 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L216-L221 type HdrMethod struct { // NumberOfSignificantValueDigits Specifies the resolution of values for the histogram in number of significant // digits. @@ -55,7 +55,7 @@ func (s *HdrMethod) UnmarshalJSON(data []byte) error { case "number_of_significant_value_digits": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/hdrpercentileranksaggregate.go b/typedapi/types/hdrpercentileranksaggregate.go index 1fc5385a89..357df783ca 100644 --- a/typedapi/types/hdrpercentileranksaggregate.go +++ b/typedapi/types/hdrpercentileranksaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // HdrPercentileRanksAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L169-L170 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L169-L170 type HdrPercentileRanksAggregate struct { Meta Metadata `json:"meta,omitempty"` Values Percentiles `json:"values"` diff --git a/typedapi/types/hdrpercentilesaggregate.go b/typedapi/types/hdrpercentilesaggregate.go index dec65d78d2..6dfe16f2fa 100644 --- a/typedapi/types/hdrpercentilesaggregate.go +++ b/typedapi/types/hdrpercentilesaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // HdrPercentilesAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L166-L167 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L166-L167 type HdrPercentilesAggregate struct { Meta Metadata `json:"meta,omitempty"` Values Percentiles `json:"values"` diff --git a/typedapi/types/healthrecord.go b/typedapi/types/healthrecord.go index ef2967a46c..7ff9165525 100644 --- a/typedapi/types/healthrecord.go +++ b/typedapi/types/healthrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // HealthRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/health/types.ts#L23-L94 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/health/types.ts#L23-L94 type HealthRecord struct { // ActiveShardsPercent active number of shards in percent ActiveShardsPercent *string `json:"active_shards_percent,omitempty"` diff --git a/typedapi/types/healthresponsebody.go b/typedapi/types/healthresponsebody.go index bdbeeb1cf6..38ed425c9b 100644 --- a/typedapi/types/healthresponsebody.go +++ b/typedapi/types/healthresponsebody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // HealthResponseBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/health/ClusterHealthResponse.ts#L39-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/health/ClusterHealthResponse.ts#L39-L72 type HealthResponseBody struct { // ActivePrimaryShards The number of active primary shards. 
ActivePrimaryShards int `json:"active_primary_shards"` @@ -89,7 +89,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case "active_primary_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -105,7 +105,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case "active_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -131,7 +131,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case "delayed_unassigned_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -155,7 +155,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case "initializing_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -171,7 +171,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case "number_of_data_nodes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -187,7 +187,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case "number_of_in_flight_fetch": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -203,7 +203,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case "number_of_nodes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -219,7 +219,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case "number_of_pending_tasks": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -235,7 +235,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case "relocating_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -265,7 +265,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { } case "timed_out": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -280,7 +280,7 @@ func (s *HealthResponseBody) UnmarshalJSON(data []byte) error { case "unassigned_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/healthstatistics.go b/typedapi/types/healthstatistics.go index be5195f515..816c625305 100644 --- a/typedapi/types/healthstatistics.go +++ b/typedapi/types/healthstatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // HealthStatistics type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L153-L155 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L153-L155 type HealthStatistics struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -54,7 +54,7 @@ func (s *HealthStatistics) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *HealthStatistics) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/helprecord.go b/typedapi/types/helprecord.go index d77ae57721..3f361b2c01 100644 --- a/typedapi/types/helprecord.go +++ b/typedapi/types/helprecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // HelpRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/help/types.ts#L20-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/help/types.ts#L20-L22 type HelpRecord struct { Endpoint string `json:"endpoint"` } diff --git a/typedapi/types/highlight.go b/typedapi/types/highlight.go index 3bf124629f..54fb712a58 100644 --- a/typedapi/types/highlight.go +++ b/typedapi/types/highlight.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -38,7 +38,7 @@ import ( // Highlight type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/highlighting.ts#L153-L156 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/highlighting.ts#L153-L156 type Highlight struct { // BoundaryChars A string that contains each boundary character. 
BoundaryChars *string `json:"boundary_chars,omitempty"` @@ -145,7 +145,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "boundary_max_scan": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -190,7 +190,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { } case "force_source": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -205,7 +205,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "fragment_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -225,7 +225,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { } case "highlight_filter": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -245,7 +245,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "max_analyzed_offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -261,7 +261,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "max_fragment_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -277,7 +277,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "no_match_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -293,7 +293,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "number_of_fragments": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -322,7 +322,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { case "phrase_limit": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -347,7 +347,7 @@ func (s *Highlight) UnmarshalJSON(data []byte) error { } case "require_field_match": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/highlightfield.go b/typedapi/types/highlightfield.go index 84fd2091dc..de83fc2509 100644 --- a/typedapi/types/highlightfield.go +++ b/typedapi/types/highlightfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -37,7 +37,7 @@ import ( // HighlightField type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/highlighting.ts#L193-L197 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/highlighting.ts#L193-L197 type HighlightField struct { Analyzer Analyzer `json:"analyzer,omitempty"` // BoundaryChars A string that contains each boundary character. 
@@ -249,7 +249,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "boundary_max_scan": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -281,7 +281,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { s.BoundaryScannerLocale = &o case "force_source": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -296,7 +296,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "fragment_offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -312,7 +312,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "fragment_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -332,7 +332,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { } case "highlight_filter": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -368,7 +368,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "max_analyzed_offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -384,7 +384,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "max_fragment_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -400,7 +400,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "no_match_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -416,7 +416,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "number_of_fragments": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -445,7 +445,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { case "phrase_limit": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -470,7 +470,7 @@ func (s *HighlightField) UnmarshalJSON(data []byte) error { } case "require_field_match": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/hint.go b/typedapi/types/hint.go index 6146df80a9..7458ed3317 100644 --- a/typedapi/types/hint.go +++ b/typedapi/types/hint.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Hint type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/suggest_user_profiles/types.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/suggest_user_profiles/types.ts#L23-L34 type Hint struct { // Labels A single key-value pair to match against the labels section // of a profile. A profile is considered matching if it matches diff --git a/typedapi/types/histogramaggregate.go b/typedapi/types/histogramaggregate.go index 52bf7592c7..eb5712d305 100644 --- a/typedapi/types/histogramaggregate.go +++ b/typedapi/types/histogramaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // HistogramAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L340-L341 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L342-L343 type HistogramAggregate struct { Buckets BucketsHistogramBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/histogramaggregation.go b/typedapi/types/histogramaggregation.go index 995c210c39..658397ade0 100644 --- a/typedapi/types/histogramaggregation.go +++ b/typedapi/types/histogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // HistogramAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L500-L546 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L502-L548 type HistogramAggregation struct { // ExtendedBounds Enables extending the bounds of the histogram beyond the data itself. ExtendedBounds *ExtendedBoundsdouble `json:"extended_bounds,omitempty"` @@ -49,15 +49,13 @@ type HistogramAggregation struct { Interval *Float64 `json:"interval,omitempty"` // Keyed If `true`, returns buckets as a hash instead of an array, keyed by the bucket // keys. - Keyed *bool `json:"keyed,omitempty"` - Meta Metadata `json:"meta,omitempty"` + Keyed *bool `json:"keyed,omitempty"` // MinDocCount Only returns buckets that have `min_doc_count` number of documents. // By default, the response will fill gaps in the histogram with empty buckets. MinDocCount *int `json:"min_doc_count,omitempty"` // Missing The value to apply to documents that do not have a value. // By default, documents without a value are ignored. Missing *Float64 `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` // Offset By default, the bucket keys start with 0 and then continue in even spaced // steps of `interval`. // The bucket boundaries can be shifted by using the `offset` option. 
@@ -111,7 +109,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { } case "interval": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -127,7 +125,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { } case "keyed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -140,14 +138,9 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { s.Keyed = &v } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -162,7 +155,7 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { } case "missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -177,20 +170,8 @@ func (s *HistogramAggregation) UnmarshalJSON(data []byte) error { s.Missing = &f } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/histogrambucket.go b/typedapi/types/histogrambucket.go index 2ea0a411bc..a1bd970e7d 100644 --- a/typedapi/types/histogrambucket.go +++ b/typedapi/types/histogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // HistogramBucket type. 
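The histogramaggregation.go hunks above remove the `Meta` and `Name` members and their decode cases, so the request type now carries only the histogram's own options. A hedged sketch of the resulting request body, using a hypothetical mirror struct instead of the generated `HistogramAggregation`:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// histogramAgg is a hypothetical mirror of the trimmed-down request type;
// note there is no meta or name field left to serialize.
type histogramAgg struct {
	Field       string   `json:"field"`
	Interval    float64  `json:"interval"`
	MinDocCount *int     `json:"min_doc_count,omitempty"`
	Missing     *float64 `json:"missing,omitempty"`
}

func main() {
	minDocs := 1
	body, _ := json.Marshal(map[string]any{
		"prices": map[string]any{
			"histogram": histogramAgg{Field: "price", Interval: 50, MinDocCount: &minDocs},
		},
	})
	fmt.Println(string(body))
	// {"prices":{"histogram":{"field":"price","interval":50,"min_doc_count":1}}}
}
```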
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L343-L346 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L345-L348 type HistogramBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -56,7 +56,7 @@ func (s *HistogramBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *HistogramBucket) UnmarshalJSON(data []byte) error { } case "key": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -543,7 +543,7 @@ func (s *HistogramBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -593,7 +593,7 @@ func (s *HistogramBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -603,7 +603,7 @@ func (s *HistogramBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -620,7 +620,7 @@ func (s *HistogramBucket) UnmarshalJSON(data []byte) error { func (s HistogramBucket) MarshalJSON() ([]byte, error) { type opt HistogramBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/histogramgrouping.go b/typedapi/types/histogramgrouping.go index 8c1ae6a052..d7671fdd23 100644 --- a/typedapi/types/histogramgrouping.go +++ b/typedapi/types/histogramgrouping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // HistogramGrouping type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/_types/Groupings.ts#L84-L97 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/_types/Groupings.ts#L84-L97 type HistogramGrouping struct { // Fields The set of fields that you wish to build histograms for. // All fields specified must be some kind of numeric. @@ -77,7 +77,7 @@ func (s *HistogramGrouping) UnmarshalJSON(data []byte) error { } case "interval": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/histogramproperty.go b/typedapi/types/histogramproperty.go index fbe97f44bc..e25f87f842 100644 --- a/typedapi/types/histogramproperty.go +++ b/typedapi/types/histogramproperty.go @@ -16,7 +16,7 @@ // under the License. 
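In the histogramproperty.go hunks that follow (and again in the new icucollationproperty.go later in this diff), the switch that maps a mapping's `type` key to a concrete `Property` implementation is updated: the `{dynamic_property}` sentinel becomes `{dynamic_type}`, and `semantic_text` and `icu_collation_keyword` cases are added. A simplified sketch of that peek-then-dispatch pattern, using hypothetical stand-in types rather than the generated ones:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// property stands in for the generated Property interface; the concrete
// types below are hypothetical, trimmed mirrors of the generated ones.
type property interface{ isProperty() }

type keywordProperty struct {
	IgnoreAbove *int `json:"ignore_above,omitempty"`
}

type icuCollationProperty struct {
	Language *string `json:"language,omitempty"`
}

type objectProperty map[string]any

func (keywordProperty) isProperty()      {}
func (icuCollationProperty) isProperty() {}
func (objectProperty) isProperty()       {}

// decodeProperty peeks at the "type" key (defaulting to "object" when it is
// absent) and then decodes the raw message into the matching concrete type.
func decodeProperty(raw json.RawMessage) (property, error) {
	kind := map[string]any{}
	if err := json.Unmarshal(raw, &kind); err != nil {
		return nil, err
	}
	if _, ok := kind["type"]; !ok {
		kind["type"] = "object"
	}
	switch kind["type"] {
	case "keyword":
		var p keywordProperty
		err := json.Unmarshal(raw, &p)
		return p, err
	case "icu_collation_keyword": // newly dispatched in this diff
		var p icuCollationProperty
		err := json.Unmarshal(raw, &p)
		return p, err
	default:
		p := objectProperty{}
		err := json.Unmarshal(raw, &p)
		return p, err
	}
}

func main() {
	p, err := decodeProperty([]byte(`{"type":"icu_collation_keyword","language":"de"}`))
	fmt.Printf("%T %v\n", p, err) // main.icuCollationProperty <nil>
}
```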
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // HistogramProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/specialized.ts#L54-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/specialized.ts#L60-L63 type HistogramProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -72,7 +72,7 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -93,7 +93,7 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -183,12 +183,6 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -207,6 +201,18 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -363,6 +369,12 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -374,7 +386,7 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -389,7 +401,7 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -417,7 +429,7 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -438,7 +450,7 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err 
!= nil { return err @@ -528,12 +540,6 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -552,6 +558,18 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -708,6 +726,12 @@ func (s *HistogramProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { diff --git a/typedapi/types/hit.go b/typedapi/types/hit.go index a076c85d54..47607b755d 100644 --- a/typedapi/types/hit.go +++ b/typedapi/types/hit.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,12 +31,12 @@ import ( // Hit type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/hits.ts#L40-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/hits.ts#L40-L64 type Hit struct { Explanation_ *Explanation `json:"_explanation,omitempty"` Fields map[string]json.RawMessage `json:"fields,omitempty"` Highlight map[string][]string `json:"highlight,omitempty"` - Id_ string `json:"_id"` + Id_ *string `json:"_id,omitempty"` IgnoredFieldValues map[string][]string `json:"ignored_field_values,omitempty"` Ignored_ []string `json:"_ignored,omitempty"` Index_ string `json:"_index"` @@ -46,7 +46,7 @@ type Hit struct { Node_ *string `json:"_node,omitempty"` PrimaryTerm_ *int64 `json:"_primary_term,omitempty"` Routing_ *string `json:"_routing,omitempty"` - Score_ Float64 `json:"_score,omitempty"` + Score_ *Float64 `json:"_score,omitempty"` SeqNo_ *int64 `json:"_seq_no,omitempty"` Shard_ *string `json:"_shard,omitempty"` Sort []FieldValue `json:"sort,omitempty"` @@ -144,7 +144,7 @@ func (s *Hit) UnmarshalJSON(data []byte) error { s.Node_ = &o case "_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/hitsevent.go b/typedapi/types/hitsevent.go index 4004632a18..6a625712ae 100644 --- a/typedapi/types/hitsevent.go +++ b/typedapi/types/hitsevent.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
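In hit.go above, `Id_` becomes `*string` and `Score_` becomes `*Float64` (with `HitsMetadata.MaxScore` following suit later in this diff), so both may now be absent from a response. Code that read `hit.Id_` or `hit.Score_` directly needs a nil check; a small illustrative sketch of defensive accessors (the helper names are not part of the client):

```go
package main

import "fmt"

// hitIDOrDefault and hitScoreOrZero are illustrative helpers for the new
// pointer-typed fields; they are not part of the generated client.
func hitIDOrDefault(id *string, fallback string) string {
	if id == nil {
		return fallback
	}
	return *id
}

func hitScoreOrZero(score *float64) float64 {
	if score == nil {
		return 0
	}
	return *score
}

func main() {
	id := "doc-1"
	score := 1.3
	fmt.Println(hitIDOrDefault(&id, "<missing>"), hitScoreOrZero(&score)) // doc-1 1.3
	fmt.Println(hitIDOrDefault(nil, "<missing>"), hitScoreOrZero(nil))    // <missing> 0
}
```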
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,17 +26,21 @@ import ( "errors" "fmt" "io" + "strconv" ) // HitsEvent type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/eql/_types/EqlHits.ts#L41-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/eql/_types/EqlHits.ts#L41-L54 type HitsEvent struct { Fields map[string][]json.RawMessage `json:"fields,omitempty"` // Id_ Unique identifier for the event. This ID is only unique within the index. Id_ string `json:"_id"` // Index_ Name of the index containing the event. Index_ string `json:"_index"` + // Missing Set to `true` for events in a timespan-constrained sequence that do not meet + // a given condition. + Missing *bool `json:"missing,omitempty"` // Source_ Original JSON body passed for the event at index time. Source_ json.RawMessage `json:"_source,omitempty"` } @@ -74,6 +78,20 @@ func (s *HitsEvent) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Index_", err) } + case "missing": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Missing", err) + } + s.Missing = &value + case bool: + s.Missing = &v + } + case "_source": if err := dec.Decode(&s.Source_); err != nil { return fmt.Errorf("%s | %w", "Source_", err) diff --git a/typedapi/types/hitsmetadata.go b/typedapi/types/hitsmetadata.go index 419fbbb862..3391d2ec18 100644 --- a/typedapi/types/hitsmetadata.go +++ b/typedapi/types/hitsmetadata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,10 +30,10 @@ import ( // HitsMetadata type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/hits.ts#L66-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/hits.ts#L66-L72 type HitsMetadata struct { - Hits []Hit `json:"hits"` - MaxScore Float64 `json:"max_score,omitempty"` + Hits []Hit `json:"hits"` + MaxScore *Float64 `json:"max_score,omitempty"` // Total Total hit count information, present only if `track_total_hits` wasn't // `false` in the search request. Total *TotalHits `json:"total,omitempty"` diff --git a/typedapi/types/hitssequence.go b/typedapi/types/hitssequence.go index 211c7cab4b..211ebe2b31 100644 --- a/typedapi/types/hitssequence.go +++ b/typedapi/types/hitssequence.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,13 +26,13 @@ import ( // HitsSequence type. 
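hitsevent.go above adds a `Missing *bool` field for events in a timespan-constrained EQL sequence that did not meet their condition, decoded with the same string-or-bool leniency as other flags. A hedged sketch of reading it, using a trimmed, hypothetical mirror of the generated `HitsEvent`:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// eqlEvent is a hypothetical, trimmed mirror of the generated HitsEvent type.
type eqlEvent struct {
	ID      string          `json:"_id"`
	Index   string          `json:"_index"`
	Missing *bool           `json:"missing,omitempty"`
	Source  json.RawMessage `json:"_source,omitempty"`
}

func main() {
	var ev eqlEvent
	_ = json.Unmarshal([]byte(`{"_id":"evt-1","_index":"logs","missing":true}`), &ev)
	if ev.Missing != nil && *ev.Missing {
		// A placeholder event in the sequence: its condition was not met.
		fmt.Println(ev.ID, "is a missing event in its sequence")
	}
}
```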
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/eql/_types/EqlHits.ts#L51-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/eql/_types/EqlHits.ts#L56-L64 type HitsSequence struct { // Events Contains events matching the query. Each object represents a matching event. Events []HitsEvent `json:"events"` // JoinKeys Shared field values used to constrain matches in the sequence. These are // defined using the by keyword in the EQL query syntax. - JoinKeys []json.RawMessage `json:"join_keys"` + JoinKeys []json.RawMessage `json:"join_keys,omitempty"` } // NewHitsSequence returns a HitsSequence. diff --git a/typedapi/types/holtlinearmodelsettings.go b/typedapi/types/holtlinearmodelsettings.go index 067dda1f8d..4591fb2562 100644 --- a/typedapi/types/holtlinearmodelsettings.go +++ b/typedapi/types/holtlinearmodelsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // HoltLinearModelSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L271-L274 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L271-L274 type HoltLinearModelSettings struct { Alpha *float32 `json:"alpha,omitempty"` Beta *float32 `json:"beta,omitempty"` @@ -53,7 +53,7 @@ func (s *HoltLinearModelSettings) UnmarshalJSON(data []byte) error { switch t { case "alpha": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -69,7 +69,7 @@ func (s *HoltLinearModelSettings) UnmarshalJSON(data []byte) error { } case "beta": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/holtmovingaverageaggregation.go b/typedapi/types/holtmovingaverageaggregation.go index 3c65e2ce6f..baac38176c 100644 --- a/typedapi/types/holtmovingaverageaggregation.go +++ b/typedapi/types/holtmovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // HoltMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L257-L260 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L257-L260 type HoltMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,10 +43,8 @@ type HoltMovingAverageAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. 
GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` Minimize *bool `json:"minimize,omitempty"` Model string `json:"model,omitempty"` - Name *string `json:"name,omitempty"` Predict *int `json:"predict,omitempty"` Settings HoltLinearModelSettings `json:"settings"` Window *int `json:"window,omitempty"` @@ -89,13 +87,8 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "minimize": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,21 +106,9 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Model", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "predict": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -148,7 +129,7 @@ func (s *HoltMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "window": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -174,10 +155,8 @@ func (s HoltMovingAverageAggregation) MarshalJSON() ([]byte, error) { BucketsPath: s.BucketsPath, Format: s.Format, GapPolicy: s.GapPolicy, - Meta: s.Meta, Minimize: s.Minimize, Model: s.Model, - Name: s.Name, Predict: s.Predict, Settings: s.Settings, Window: s.Window, diff --git a/typedapi/types/holtwintersmodelsettings.go b/typedapi/types/holtwintersmodelsettings.go index 52b008ed39..20066f7049 100644 --- a/typedapi/types/holtwintersmodelsettings.go +++ b/typedapi/types/holtwintersmodelsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // HoltWintersModelSettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L275-L282 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L275-L282 type HoltWintersModelSettings struct { Alpha *float32 `json:"alpha,omitempty"` Beta *float32 `json:"beta,omitempty"` @@ -59,7 +59,7 @@ func (s *HoltWintersModelSettings) UnmarshalJSON(data []byte) error { switch t { case "alpha": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *HoltWintersModelSettings) UnmarshalJSON(data []byte) error { } case "beta": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *HoltWintersModelSettings) UnmarshalJSON(data []byte) error { } case "gamma": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -107,7 +107,7 @@ func (s *HoltWintersModelSettings) UnmarshalJSON(data []byte) error { } case "pad": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -122,7 +122,7 @@ func (s *HoltWintersModelSettings) UnmarshalJSON(data []byte) error { case "period": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/holtwintersmovingaverageaggregation.go b/typedapi/types/holtwintersmovingaverageaggregation.go index a421845c5c..06fa96cda7 100644 --- a/typedapi/types/holtwintersmovingaverageaggregation.go +++ b/typedapi/types/holtwintersmovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // HoltWintersMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L262-L265 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L262-L265 type HoltWintersMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,10 +43,8 @@ type HoltWintersMovingAverageAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. 
GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` Minimize *bool `json:"minimize,omitempty"` Model string `json:"model,omitempty"` - Name *string `json:"name,omitempty"` Predict *int `json:"predict,omitempty"` Settings HoltWintersModelSettings `json:"settings"` Window *int `json:"window,omitempty"` @@ -89,13 +87,8 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "minimize": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,21 +106,9 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Model", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "predict": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -148,7 +129,7 @@ func (s *HoltWintersMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "window": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -174,10 +155,8 @@ func (s HoltWintersMovingAverageAggregation) MarshalJSON() ([]byte, error) { BucketsPath: s.BucketsPath, Format: s.Format, GapPolicy: s.GapPolicy, - Meta: s.Meta, Minimize: s.Minimize, Model: s.Model, - Name: s.Name, Predict: s.Predict, Settings: s.Settings, Window: s.Window, diff --git a/typedapi/types/hop.go b/typedapi/types/hop.go index c5f2eecf46..44a41f7eb7 100644 --- a/typedapi/types/hop.go +++ b/typedapi/types/hop.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Hop type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/graph/_types/Hop.ts#L23-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/graph/_types/Hop.ts#L23-L36 type Hop struct { // Connections Specifies one or more fields from which you want to extract terms that are // associated with the specified vertices. diff --git a/typedapi/types/hotthread.go b/typedapi/types/hotthread.go index d9d4255a41..db29333091 100644 --- a/typedapi/types/hotthread.go +++ b/typedapi/types/hotthread.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // HotThread type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/hot_threads/types.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/hot_threads/types.ts#L23-L28 type HotThread struct { Hosts []string `json:"hosts"` NodeId string `json:"node_id"` diff --git a/typedapi/types/hourandminute.go b/typedapi/types/hourandminute.go index 91b952252c..135aba704b 100644 --- a/typedapi/types/hourandminute.go +++ b/typedapi/types/hourandminute.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // HourAndMinute type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Schedule.ts#L105-L108 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Schedule.ts#L105-L108 type HourAndMinute struct { Hour []int `json:"hour"` Minute []int `json:"minute"` diff --git a/typedapi/types/hourlyschedule.go b/typedapi/types/hourlyschedule.go index ca447f173c..14d3ac0778 100644 --- a/typedapi/types/hourlyschedule.go +++ b/typedapi/types/hourlyschedule.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // HourlySchedule type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Schedule.ts#L47-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Schedule.ts#L47-L49 type HourlySchedule struct { Minute []int `json:"minute"` } diff --git a/typedapi/types/htmlstripcharfilter.go b/typedapi/types/htmlstripcharfilter.go index 5d8b7022a0..2ed52471ae 100644 --- a/typedapi/types/htmlstripcharfilter.go +++ b/typedapi/types/htmlstripcharfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,10 +30,11 @@ import ( // HtmlStripCharFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/char_filters.ts#L43-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/char_filters.ts#L43-L46 type HtmlStripCharFilter struct { - Type string `json:"type,omitempty"` - Version *string `json:"version,omitempty"` + EscapedTags []string `json:"escaped_tags,omitempty"` + Type string `json:"type,omitempty"` + Version *string `json:"version,omitempty"` } func (s *HtmlStripCharFilter) UnmarshalJSON(data []byte) error { @@ -51,6 +52,11 @@ func (s *HtmlStripCharFilter) UnmarshalJSON(data []byte) error { switch t { + case "escaped_tags": + if err := dec.Decode(&s.EscapedTags); err != nil { + return fmt.Errorf("%s | %w", "EscapedTags", err) + } + case "type": if err := dec.Decode(&s.Type); err != nil { return fmt.Errorf("%s | %w", "Type", err) @@ -70,8 +76,9 @@ func (s *HtmlStripCharFilter) UnmarshalJSON(data []byte) error { func (s HtmlStripCharFilter) MarshalJSON() ([]byte, error) { type innerHtmlStripCharFilter HtmlStripCharFilter tmp := innerHtmlStripCharFilter{ - Type: s.Type, - Version: s.Version, + EscapedTags: s.EscapedTags, + Type: s.Type, + Version: s.Version, } tmp.Type = "html_strip" diff --git a/typedapi/types/http.go b/typedapi/types/http.go index 4649572968..99c5c4c2b6 100644 --- a/typedapi/types/http.go +++ b/typedapi/types/http.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Http type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L633-L647 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L633-L647 type Http struct { // Clients Information on current and recently-closed HTTP client connections. // Clients that have been closed longer than the @@ -66,7 +66,7 @@ func (s *Http) UnmarshalJSON(data []byte) error { case "current_open": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -81,7 +81,7 @@ func (s *Http) UnmarshalJSON(data []byte) error { } case "total_opened": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/httpemailattachment.go b/typedapi/types/httpemailattachment.go index bcccc93756..fe8fa3b875 100644 --- a/typedapi/types/httpemailattachment.go +++ b/typedapi/types/httpemailattachment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // HttpEmailAttachment type. 
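htmlstripcharfilter.go above gains the `escaped_tags` option, which lists HTML tags the filter should leave in place while stripping the rest. A hedged sketch of the analysis settings the updated type can now express, using a hypothetical mirror struct:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// htmlStrip is a hypothetical mirror of the updated char filter definition.
type htmlStrip struct {
	Type        string   `json:"type"`
	EscapedTags []string `json:"escaped_tags,omitempty"`
}

func main() {
	settings := map[string]any{
		"analysis": map[string]any{
			"char_filter": map[string]any{
				// Keep <b> tags, strip everything else.
				"keep_bold": htmlStrip{Type: "html_strip", EscapedTags: []string{"b"}},
			},
		},
	}
	body, _ := json.Marshal(settings)
	fmt.Println(string(body))
	// {"analysis":{"char_filter":{"keep_bold":{"type":"html_strip","escaped_tags":["b"]}}}}
}
```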
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L218-L222 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L218-L222 type HttpEmailAttachment struct { ContentType *string `json:"content_type,omitempty"` Inline *bool `json:"inline,omitempty"` @@ -66,7 +66,7 @@ func (s *HttpEmailAttachment) UnmarshalJSON(data []byte) error { s.ContentType = &o case "inline": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/httpheaders.go b/typedapi/types/httpheaders.go index a88338a9bc..f52c352896 100644 --- a/typedapi/types/httpheaders.go +++ b/typedapi/types/httpheaders.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // HttpHeaders type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L158-L158 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L158-L158 type HttpHeaders map[string][]string diff --git a/typedapi/types/httpinput.go b/typedapi/types/httpinput.go index 152114244d..3ae5bd567e 100644 --- a/typedapi/types/httpinput.go +++ b/typedapi/types/httpinput.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // HttpInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L44-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L44-L48 type HttpInput struct { Extract []string `json:"extract,omitempty"` Request *HttpInputRequestDefinition `json:"request,omitempty"` diff --git a/typedapi/types/httpinputauthentication.go b/typedapi/types/httpinputauthentication.go index 678c95e82f..54effac298 100644 --- a/typedapi/types/httpinputauthentication.go +++ b/typedapi/types/httpinputauthentication.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // HttpInputAuthentication type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L50-L52 type HttpInputAuthentication struct { Basic HttpInputBasicAuthentication `json:"basic"` } diff --git a/typedapi/types/httpinputbasicauthentication.go b/typedapi/types/httpinputbasicauthentication.go index 6296a169f1..ed16cac51e 100644 --- a/typedapi/types/httpinputbasicauthentication.go +++ b/typedapi/types/httpinputbasicauthentication.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // HttpInputBasicAuthentication type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L54-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L54-L57 type HttpInputBasicAuthentication struct { Password string `json:"password"` Username string `json:"username"` diff --git a/typedapi/types/httpinputproxy.go b/typedapi/types/httpinputproxy.go index 4db8c005b7..8b75d24131 100644 --- a/typedapi/types/httpinputproxy.go +++ b/typedapi/types/httpinputproxy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // HttpInputProxy type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L67-L70 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L67-L70 type HttpInputProxy struct { Host string `json:"host"` Port uint `json:"port"` diff --git a/typedapi/types/httpinputrequestdefinition.go b/typedapi/types/httpinputrequestdefinition.go index 595d16ffbf..f232320899 100644 --- a/typedapi/types/httpinputrequestdefinition.go +++ b/typedapi/types/httpinputrequestdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // HttpInputRequestDefinition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L72-L86 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L72-L86 type HttpInputRequestDefinition struct { Auth *HttpInputAuthentication `json:"auth,omitempty"` Body *string `json:"body,omitempty"` diff --git a/typedapi/types/httpinputrequestresult.go b/typedapi/types/httpinputrequestresult.go index 70fb69e74b..bb312163d3 100644 --- a/typedapi/types/httpinputrequestresult.go +++ b/typedapi/types/httpinputrequestresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // HttpInputRequestResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L300-L300 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L300-L300 type HttpInputRequestResult struct { Auth *HttpInputAuthentication `json:"auth,omitempty"` Body *string `json:"body,omitempty"` diff --git a/typedapi/types/httpinputresponseresult.go b/typedapi/types/httpinputresponseresult.go index 64993196fa..809ce28d57 100644 --- a/typedapi/types/httpinputresponseresult.go +++ b/typedapi/types/httpinputresponseresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // HttpInputResponseResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L302-L306 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L302-L306 type HttpInputResponseResult struct { Body string `json:"body"` Headers HttpHeaders `json:"headers"` @@ -72,7 +72,7 @@ func (s *HttpInputResponseResult) UnmarshalJSON(data []byte) error { case "status": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/hunspelltokenfilter.go b/typedapi/types/hunspelltokenfilter.go index 743bceb4bd..f86ddf46af 100644 --- a/typedapi/types/hunspelltokenfilter.go +++ b/typedapi/types/hunspelltokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // HunspellTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L200-L206 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L202-L208 type HunspellTokenFilter struct { Dedup *bool `json:"dedup,omitempty"` Dictionary *string `json:"dictionary,omitempty"` @@ -57,7 +57,7 @@ func (s *HunspellTokenFilter) UnmarshalJSON(data []byte) error { switch t { case "dedup": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *HunspellTokenFilter) UnmarshalJSON(data []byte) error { s.Locale = o case "longest_only": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/hyperparameter.go b/typedapi/types/hyperparameter.go index 68e076b65f..c5f349d506 100644 --- a/typedapi/types/hyperparameter.go +++ b/typedapi/types/hyperparameter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Hyperparameter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L217-L231 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L217-L231 type Hyperparameter struct { // AbsoluteImportance A positive number showing how much the parameter influences the variation of // the loss function. For hyperparameters with values that are not specified by @@ -67,7 +67,7 @@ func (s *Hyperparameter) UnmarshalJSON(data []byte) error { switch t { case "absolute_importance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *Hyperparameter) UnmarshalJSON(data []byte) error { } case "relative_importance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *Hyperparameter) UnmarshalJSON(data []byte) error { } case "supplied": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +118,7 @@ func (s *Hyperparameter) UnmarshalJSON(data []byte) error { } case "value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/hyperparameters.go b/typedapi/types/hyperparameters.go index 3f4549b332..690e90d52b 100644 --- a/typedapi/types/hyperparameters.go +++ b/typedapi/types/hyperparameters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Hyperparameters type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L419-L525 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L419-L525 type Hyperparameters struct { // Alpha Advanced configuration option. // Machine learning uses loss guided tree growing, which means that the decision @@ -148,7 +148,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { switch t { case "alpha": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -164,7 +164,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { } case "downsample_factor": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -180,7 +180,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { } case "eta": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -196,7 +196,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { } case "eta_growth_rate_per_tree": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -212,7 +212,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { } case "feature_bag_fraction": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -228,7 +228,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { } case "gamma": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -244,7 +244,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { } case "lambda": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -261,7 +261,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case "max_attempts_to_add_tree": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -277,7 +277,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case "max_optimization_rounds_per_hyperparameter": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -293,7 +293,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case "max_trees": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -309,7 +309,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case "num_folds": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -325,7 +325,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case "num_splits_per_feature": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -341,7 +341,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { case "soft_tree_depth_limit": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -356,7 +356,7 @@ func (s *Hyperparameters) UnmarshalJSON(data []byte) error { } case "soft_tree_depth_tolerance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/hyphenationdecompoundertokenfilter.go b/typedapi/types/hyphenationdecompoundertokenfilter.go index 535dd0d2ab..599caea02d 100644 --- a/typedapi/types/hyphenationdecompoundertokenfilter.go +++ b/typedapi/types/hyphenationdecompoundertokenfilter.go @@ -16,7 
+16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // HyphenationDecompounderTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L58-L60 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L58-L60 type HyphenationDecompounderTokenFilter struct { HyphenationPatternsPath *string `json:"hyphenation_patterns_path,omitempty"` MaxSubwordSize *int `json:"max_subword_size,omitempty"` @@ -73,7 +73,7 @@ func (s *HyphenationDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case "max_subword_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *HyphenationDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case "min_subword_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -105,7 +105,7 @@ func (s *HyphenationDecompounderTokenFilter) UnmarshalJSON(data []byte) error { case "min_word_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -120,7 +120,7 @@ func (s *HyphenationDecompounderTokenFilter) UnmarshalJSON(data []byte) error { } case "only_longest_match": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/icuanalyzer.go b/typedapi/types/icuanalyzer.go index a3455a7c6d..830008c0fe 100644 --- a/typedapi/types/icuanalyzer.go +++ b/typedapi/types/icuanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -29,7 +29,7 @@ import ( // IcuAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L67-L71 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L67-L71 type IcuAnalyzer struct { Method icunormalizationtype.IcuNormalizationType `json:"method"` Mode icunormalizationmode.IcuNormalizationMode `json:"mode"` diff --git a/typedapi/types/icucollationproperty.go b/typedapi/types/icucollationproperty.go new file mode 100644 index 0000000000..45af896ae8 --- /dev/null +++ b/typedapi/types/icucollationproperty.go @@ -0,0 +1,1036 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/dynamicmapping" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icucollationalternate" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icucollationcasefirst" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icucollationdecomposition" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/icucollationstrength" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indexoptions" +) + +// IcuCollationProperty type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/specialized.ts#L94-L118 +type IcuCollationProperty struct { + Alternate *icucollationalternate.IcuCollationAlternate `json:"alternate,omitempty"` + CaseFirst *icucollationcasefirst.IcuCollationCaseFirst `json:"case_first,omitempty"` + CaseLevel *bool `json:"case_level,omitempty"` + CopyTo []string `json:"copy_to,omitempty"` + Country *string `json:"country,omitempty"` + Decomposition *icucollationdecomposition.IcuCollationDecomposition `json:"decomposition,omitempty"` + DocValues *bool `json:"doc_values,omitempty"` + Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` + Fields map[string]Property `json:"fields,omitempty"` + HiraganaQuaternaryMode *bool `json:"hiragana_quaternary_mode,omitempty"` + IgnoreAbove *int `json:"ignore_above,omitempty"` + // Index Should the field be searchable? + Index *bool `json:"index,omitempty"` + IndexOptions *indexoptions.IndexOptions `json:"index_options,omitempty"` + Language *string `json:"language,omitempty"` + // Meta Metadata about the field. + Meta map[string]string `json:"meta,omitempty"` + Norms *bool `json:"norms,omitempty"` + // NullValue Accepts a string value which is substituted for any explicit null values. + // Defaults to null, which means the field is treated as missing. 
+ NullValue *string `json:"null_value,omitempty"` + Numeric *bool `json:"numeric,omitempty"` + Properties map[string]Property `json:"properties,omitempty"` + Rules *string `json:"rules,omitempty"` + Similarity *string `json:"similarity,omitempty"` + Store *bool `json:"store,omitempty"` + Strength *icucollationstrength.IcuCollationStrength `json:"strength,omitempty"` + Type string `json:"type,omitempty"` + VariableTop *string `json:"variable_top,omitempty"` + Variant *string `json:"variant,omitempty"` +} + +func (s *IcuCollationProperty) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "alternate": + if err := dec.Decode(&s.Alternate); err != nil { + return fmt.Errorf("%s | %w", "Alternate", err) + } + + case "case_first": + if err := dec.Decode(&s.CaseFirst); err != nil { + return fmt.Errorf("%s | %w", "CaseFirst", err) + } + + case "case_level": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "CaseLevel", err) + } + s.CaseLevel = &value + case bool: + s.CaseLevel = &v + } + + case "copy_to": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "CopyTo", err) + } + + s.CopyTo = append(s.CopyTo, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.CopyTo); err != nil { + return fmt.Errorf("%s | %w", "CopyTo", err) + } + } + + case "country": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Country", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Country = &o + + case "decomposition": + if err := dec.Decode(&s.Decomposition); err != nil { + return fmt.Errorf("%s | %w", "Decomposition", err) + } + + case "doc_values": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "DocValues", err) + } + s.DocValues = &value + case bool: + s.DocValues = &v + } + + case "dynamic": + if err := dec.Decode(&s.Dynamic); err != nil { + return fmt.Errorf("%s | %w", "Dynamic", err) + } + + case "fields": + if s.Fields == nil { + s.Fields = make(map[string]Property, 0) + } + refs := make(map[string]json.RawMessage, 0) + dec.Decode(&refs) + for key, message := range refs { + kind := make(map[string]any) + buf := bytes.NewReader(message) + localDec := json.NewDecoder(buf) + localDec.Decode(&kind) + buf.Seek(0, io.SeekStart) + if _, ok := kind["type"]; !ok { + kind["type"] = "object" + } + switch kind["type"] { + case "binary": + oo := NewBinaryProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "boolean": + oo := NewBooleanProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "{dynamic_type}": + oo := NewDynamicProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "join": + oo := NewJoinProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "keyword": + oo := NewKeywordProperty() + if err := localDec.Decode(&oo); err != nil { + 
return err + } + s.Fields[key] = oo + case "match_only_text": + oo := NewMatchOnlyTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "percolator": + oo := NewPercolatorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "rank_feature": + oo := NewRankFeatureProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "rank_features": + oo := NewRankFeaturesProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "search_as_you_type": + oo := NewSearchAsYouTypeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "text": + oo := NewTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "version": + oo := NewVersionProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "wildcard": + oo := NewWildcardProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "date_nanos": + oo := NewDateNanosProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "date": + oo := NewDateProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "aggregate_metric_double": + oo := NewAggregateMetricDoubleProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "dense_vector": + oo := NewDenseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "flattened": + oo := NewFlattenedProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "nested": + oo := NewNestedProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "object": + oo := NewObjectProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "completion": + oo := NewCompletionProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "constant_keyword": + oo := NewConstantKeywordProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "alias": + oo := NewFieldAliasProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "histogram": + oo := NewHistogramProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "ip": + oo := NewIpProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "murmur3": + oo := NewMurmur3HashProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "token_count": + oo := NewTokenCountProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "geo_point": + oo := NewGeoPointProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "geo_shape": + oo := NewGeoShapeProperty() + if err := 
localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "point": + oo := NewPointProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "shape": + oo := NewShapeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "byte": + oo := NewByteNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "double": + oo := NewDoubleNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "float": + oo := NewFloatNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "half_float": + oo := NewHalfFloatNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "integer": + oo := NewIntegerNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "long": + oo := NewLongNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "scaled_float": + oo := NewScaledFloatNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "short": + oo := NewShortNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "unsigned_long": + oo := NewUnsignedLongNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "date_range": + oo := NewDateRangeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "double_range": + oo := NewDoubleRangeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "float_range": + oo := NewFloatRangeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "integer_range": + oo := NewIntegerRangeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "ip_range": + oo := NewIpRangeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "long_range": + oo := NewLongRangeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + default: + oo := new(Property) + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + } + } + + case "hiragana_quaternary_mode": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "HiraganaQuaternaryMode", err) + } + s.HiraganaQuaternaryMode = &value + case bool: + s.HiraganaQuaternaryMode = &v + } + + case "ignore_above": + + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "IgnoreAbove", err) + } + s.IgnoreAbove = &value + case float64: + f := int(v) + s.IgnoreAbove = &f + } + + case "index": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Index", err) + } + s.Index = &value + case bool: + s.Index = &v + } + + case 
"index_options": + if err := dec.Decode(&s.IndexOptions); err != nil { + return fmt.Errorf("%s | %w", "IndexOptions", err) + } + + case "language": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Language", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Language = &o + + case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } + if err := dec.Decode(&s.Meta); err != nil { + return fmt.Errorf("%s | %w", "Meta", err) + } + + case "norms": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Norms", err) + } + s.Norms = &value + case bool: + s.Norms = &v + } + + case "null_value": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "NullValue", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.NullValue = &o + + case "numeric": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Numeric", err) + } + s.Numeric = &value + case bool: + s.Numeric = &v + } + + case "properties": + if s.Properties == nil { + s.Properties = make(map[string]Property, 0) + } + refs := make(map[string]json.RawMessage, 0) + dec.Decode(&refs) + for key, message := range refs { + kind := make(map[string]any) + buf := bytes.NewReader(message) + localDec := json.NewDecoder(buf) + localDec.Decode(&kind) + buf.Seek(0, io.SeekStart) + if _, ok := kind["type"]; !ok { + kind["type"] = "object" + } + switch kind["type"] { + case "binary": + oo := NewBinaryProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "boolean": + oo := NewBooleanProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "{dynamic_type}": + oo := NewDynamicProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "join": + oo := NewJoinProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "keyword": + oo := NewKeywordProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "match_only_text": + oo := NewMatchOnlyTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "percolator": + oo := NewPercolatorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "rank_feature": + oo := NewRankFeatureProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "rank_features": + oo := NewRankFeaturesProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "search_as_you_type": + oo := NewSearchAsYouTypeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "text": + oo := NewTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "version": + oo := NewVersionProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "wildcard": + oo := NewWildcardProperty() + if err := localDec.Decode(&oo); err != nil { + return err 
+ } + s.Properties[key] = oo + case "date_nanos": + oo := NewDateNanosProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "date": + oo := NewDateProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "aggregate_metric_double": + oo := NewAggregateMetricDoubleProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "dense_vector": + oo := NewDenseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "flattened": + oo := NewFlattenedProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "nested": + oo := NewNestedProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "object": + oo := NewObjectProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "completion": + oo := NewCompletionProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "constant_keyword": + oo := NewConstantKeywordProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "alias": + oo := NewFieldAliasProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "histogram": + oo := NewHistogramProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "ip": + oo := NewIpProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "murmur3": + oo := NewMurmur3HashProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "token_count": + oo := NewTokenCountProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "geo_point": + oo := NewGeoPointProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "geo_shape": + oo := NewGeoShapeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "point": + oo := NewPointProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "shape": + oo := NewShapeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "byte": + oo := NewByteNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "double": + oo := NewDoubleNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "float": + oo := NewFloatNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "half_float": + oo := NewHalfFloatNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "integer": + oo := NewIntegerNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "long": 
+ oo := NewLongNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "scaled_float": + oo := NewScaledFloatNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "short": + oo := NewShortNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "unsigned_long": + oo := NewUnsignedLongNumberProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "date_range": + oo := NewDateRangeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "double_range": + oo := NewDoubleRangeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "float_range": + oo := NewFloatRangeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "integer_range": + oo := NewIntegerRangeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "ip_range": + oo := NewIpRangeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "long_range": + oo := NewLongRangeProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + default: + oo := new(Property) + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + } + } + + case "rules": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Rules", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Rules = &o + + case "similarity": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Similarity", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Similarity = &o + + case "store": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Store", err) + } + s.Store = &value + case bool: + s.Store = &v + } + + case "strength": + if err := dec.Decode(&s.Strength); err != nil { + return fmt.Errorf("%s | %w", "Strength", err) + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return fmt.Errorf("%s | %w", "Type", err) + } + + case "variable_top": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "VariableTop", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.VariableTop = &o + + case "variant": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Variant", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Variant = &o + + } + } + return nil +} + +// MarshalJSON override marshalling to include literal value +func (s IcuCollationProperty) MarshalJSON() ([]byte, error) { + type innerIcuCollationProperty IcuCollationProperty + tmp := innerIcuCollationProperty{ + Alternate: s.Alternate, + CaseFirst: s.CaseFirst, + CaseLevel: s.CaseLevel, + CopyTo: s.CopyTo, + 
Country: s.Country, + Decomposition: s.Decomposition, + DocValues: s.DocValues, + Dynamic: s.Dynamic, + Fields: s.Fields, + HiraganaQuaternaryMode: s.HiraganaQuaternaryMode, + IgnoreAbove: s.IgnoreAbove, + Index: s.Index, + IndexOptions: s.IndexOptions, + Language: s.Language, + Meta: s.Meta, + Norms: s.Norms, + NullValue: s.NullValue, + Numeric: s.Numeric, + Properties: s.Properties, + Rules: s.Rules, + Similarity: s.Similarity, + Store: s.Store, + Strength: s.Strength, + Type: s.Type, + VariableTop: s.VariableTop, + Variant: s.Variant, + } + + tmp.Type = "icu_collation_keyword" + + return json.Marshal(tmp) +} + +// NewIcuCollationProperty returns a IcuCollationProperty. +func NewIcuCollationProperty() *IcuCollationProperty { + r := &IcuCollationProperty{ + Fields: make(map[string]Property, 0), + Meta: make(map[string]string, 0), + Properties: make(map[string]Property, 0), + } + + return r +} diff --git a/typedapi/types/icucollationtokenfilter.go b/typedapi/types/icucollationtokenfilter.go index 921785417a..e0812a54d8 100644 --- a/typedapi/types/icucollationtokenfilter.go +++ b/typedapi/types/icucollationtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -36,20 +36,20 @@ import ( // IcuCollationTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L51-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L51-L65 type IcuCollationTokenFilter struct { Alternate *icucollationalternate.IcuCollationAlternate `json:"alternate,omitempty"` - CaseFirst *icucollationcasefirst.IcuCollationCaseFirst `json:"caseFirst,omitempty"` - CaseLevel *bool `json:"caseLevel,omitempty"` + CaseFirst *icucollationcasefirst.IcuCollationCaseFirst `json:"case_first,omitempty"` + CaseLevel *bool `json:"case_level,omitempty"` Country *string `json:"country,omitempty"` Decomposition *icucollationdecomposition.IcuCollationDecomposition `json:"decomposition,omitempty"` - HiraganaQuaternaryMode *bool `json:"hiraganaQuaternaryMode,omitempty"` + HiraganaQuaternaryMode *bool `json:"hiragana_quaternary_mode,omitempty"` Language *string `json:"language,omitempty"` Numeric *bool `json:"numeric,omitempty"` Rules *string `json:"rules,omitempty"` Strength *icucollationstrength.IcuCollationStrength `json:"strength,omitempty"` Type string `json:"type,omitempty"` - VariableTop *string `json:"variableTop,omitempty"` + VariableTop *string `json:"variable_top,omitempty"` Variant *string `json:"variant,omitempty"` Version *string `json:"version,omitempty"` } @@ -74,13 +74,13 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Alternate", err) } - case "caseFirst": + case "case_first": if err := dec.Decode(&s.CaseFirst); err != nil { return fmt.Errorf("%s | %w", "CaseFirst", err) } - case "caseLevel": - var tmp interface{} + case "case_level": + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,8 +110,8 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Decomposition", err) } - case "hiraganaQuaternaryMode": - 
var tmp interface{} + case "hiragana_quaternary_mode": + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -137,7 +137,7 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { s.Language = &o case "numeric": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -172,7 +172,7 @@ func (s *IcuCollationTokenFilter) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Type", err) } - case "variableTop": + case "variable_top": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { return fmt.Errorf("%s | %w", "VariableTop", err) diff --git a/typedapi/types/icufoldingtokenfilter.go b/typedapi/types/icufoldingtokenfilter.go index 8dd1f16d00..cd4699bdcd 100644 --- a/typedapi/types/icufoldingtokenfilter.go +++ b/typedapi/types/icufoldingtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IcuFoldingTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L46-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L46-L49 type IcuFoldingTokenFilter struct { Type string `json:"type,omitempty"` UnicodeSetFilter string `json:"unicode_set_filter"` diff --git a/typedapi/types/icunormalizationcharfilter.go b/typedapi/types/icunormalizationcharfilter.go index 6d8873bb08..b0e0a928ff 100644 --- a/typedapi/types/icunormalizationcharfilter.go +++ b/typedapi/types/icunormalizationcharfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IcuNormalizationCharFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L40-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L40-L44 type IcuNormalizationCharFilter struct { Mode *icunormalizationmode.IcuNormalizationMode `json:"mode,omitempty"` Name *icunormalizationtype.IcuNormalizationType `json:"name,omitempty"` diff --git a/typedapi/types/icunormalizationtokenfilter.go b/typedapi/types/icunormalizationtokenfilter.go index 15c56c700e..85e9e405f7 100644 --- a/typedapi/types/icunormalizationtokenfilter.go +++ b/typedapi/types/icunormalizationtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // IcuNormalizationTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L35-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L35-L38 type IcuNormalizationTokenFilter struct { Name icunormalizationtype.IcuNormalizationType `json:"name"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/icutokenizer.go b/typedapi/types/icutokenizer.go index 1946a09616..f096b70d3f 100644 --- a/typedapi/types/icutokenizer.go +++ b/typedapi/types/icutokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IcuTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L30-L33 type IcuTokenizer struct { RuleFiles string `json:"rule_files"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/icutransformtokenfilter.go b/typedapi/types/icutransformtokenfilter.go index 9aa4c4e2cd..77143f5237 100644 --- a/typedapi/types/icutransformtokenfilter.go +++ b/typedapi/types/icutransformtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IcuTransformTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/icu-plugin.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/icu-plugin.ts#L24-L28 type IcuTransformTokenFilter struct { Dir *icutransformdirection.IcuTransformDirection `json:"dir,omitempty"` Id string `json:"id"` diff --git a/typedapi/types/ids.go b/typedapi/types/ids.go index b3664c04f7..1c14ec7b9c 100644 --- a/typedapi/types/ids.go +++ b/typedapi/types/ids.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Ids type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L62-L62 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L62-L62 type Ids []string diff --git a/typedapi/types/idsquery.go b/typedapi/types/idsquery.go index 78e7308c40..406728fdf2 100644 --- a/typedapi/types/idsquery.go +++ b/typedapi/types/idsquery.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IdsQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L80-L85 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L81-L86 type IdsQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -60,7 +60,7 @@ func (s *IdsQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/ilm.go b/typedapi/types/ilm.go index c8c0224596..28512a5a3e 100644 --- a/typedapi/types/ilm.go +++ b/typedapi/types/ilm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Ilm type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L162-L165 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L162-L165 type Ilm struct { PolicyCount int `json:"policy_count"` PolicyStats []IlmPolicyStatistics `json:"policy_stats"` @@ -54,7 +54,7 @@ func (s *Ilm) UnmarshalJSON(data []byte) error { case "policy_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/ilmindicator.go b/typedapi/types/ilmindicator.go index 88c19bac64..81dca5f50e 100644 --- a/typedapi/types/ilmindicator.go +++ b/typedapi/types/ilmindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IlmIndicator type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L145-L149 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L145-L149 type IlmIndicator struct { Details *IlmIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` diff --git a/typedapi/types/ilmindicatordetails.go b/typedapi/types/ilmindicatordetails.go index 7decb394a4..879d48b425 100644 --- a/typedapi/types/ilmindicatordetails.go +++ b/typedapi/types/ilmindicatordetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IlmIndicatorDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L150-L153 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L150-L153 type IlmIndicatorDetails struct { IlmStatus lifecycleoperationmode.LifecycleOperationMode `json:"ilm_status"` Policies int64 `json:"policies"` @@ -60,7 +60,7 @@ func (s *IlmIndicatorDetails) UnmarshalJSON(data []byte) error { } case "policies": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/ilmpolicy.go b/typedapi/types/ilmpolicy.go index e35977a94a..ab8a6632bb 100644 --- a/typedapi/types/ilmpolicy.go +++ b/typedapi/types/ilmpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // IlmPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/_types/Policy.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/_types/Policy.ts#L23-L26 type IlmPolicy struct { Meta_ Metadata `json:"_meta,omitempty"` Phases Phases `json:"phases"` diff --git a/typedapi/types/ilmpolicystatistics.go b/typedapi/types/ilmpolicystatistics.go index bf19248a5b..4b335eee21 100644 --- a/typedapi/types/ilmpolicystatistics.go +++ b/typedapi/types/ilmpolicystatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IlmPolicyStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L157-L160 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L157-L160 type IlmPolicyStatistics struct { IndicesManaged int `json:"indices_managed"` Phases Phases `json:"phases"` @@ -54,7 +54,7 @@ func (s *IlmPolicyStatistics) UnmarshalJSON(data []byte) error { case "indices_managed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/impact.go b/typedapi/types/impact.go index 92fff95e80..c8ef312940 100644 --- a/typedapi/types/impact.go +++ b/typedapi/types/impact.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // Impact type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L65-L70 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L65-L70 type Impact struct { Description string `json:"description"` Id string `json:"id"` @@ -87,7 +87,7 @@ func (s *Impact) UnmarshalJSON(data []byte) error { case "severity": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indexaction.go b/typedapi/types/indexaction.go index 1d851e7200..0dc093ee19 100644 --- a/typedapi/types/indexaction.go +++ b/typedapi/types/indexaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IndexAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L256-L265 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L256-L265 type IndexAction struct { DocId *string `json:"doc_id,omitempty"` ExecutionTimeField *string `json:"execution_time_field,omitempty"` diff --git a/typedapi/types/indexaliases.go b/typedapi/types/indexaliases.go index 65d4382679..1d9b7bdb42 100644 --- a/typedapi/types/indexaliases.go +++ b/typedapi/types/indexaliases.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndexAliases type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_alias/IndicesGetAliasResponse.ts#L37-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_alias/IndicesGetAliasResponse.ts#L37-L39 type IndexAliases struct { Aliases map[string]AliasDefinition `json:"aliases"` } diff --git a/typedapi/types/indexanddatastreamaction.go b/typedapi/types/indexanddatastreamaction.go index dbc26aad63..f00f8e68ee 100644 --- a/typedapi/types/indexanddatastreamaction.go +++ b/typedapi/types/indexanddatastreamaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // IndexAndDataStreamAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/modify_data_stream/types.ts#L39-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/modify_data_stream/types.ts#L39-L44 type IndexAndDataStreamAction struct { // DataStream Data stream targeted by the action. DataStream string `json:"data_stream"` diff --git a/typedapi/types/indexcapabilities.go b/typedapi/types/indexcapabilities.go index 66c1c2d990..6b0300beae 100644 --- a/typedapi/types/indexcapabilities.go +++ b/typedapi/types/indexcapabilities.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndexCapabilities type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_rollup_index_caps/types.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_rollup_index_caps/types.ts#L24-L26 type IndexCapabilities struct { RollupJobs []RollupJobSummary `json:"rollup_jobs"` } diff --git a/typedapi/types/indexdetails.go b/typedapi/types/indexdetails.go index 48e374670c..9818b62269 100644 --- a/typedapi/types/indexdetails.go +++ b/typedapi/types/indexdetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndexDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotIndexDetails.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotIndexDetails.ts#L23-L28 type IndexDetails struct { MaxSegmentsPerShard int64 `json:"max_segments_per_shard"` ShardCount int `json:"shard_count"` @@ -55,7 +55,7 @@ func (s *IndexDetails) UnmarshalJSON(data []byte) error { switch t { case "max_segments_per_shard": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *IndexDetails) UnmarshalJSON(data []byte) error { case "shard_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *IndexDetails) UnmarshalJSON(data []byte) error { } case "size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indexfield.go b/typedapi/types/indexfield.go index 42b34d8f54..b90f745fd8 100644 --- a/typedapi/types/indexfield.go +++ b/typedapi/types/indexfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndexField type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/meta-fields.ts#L46-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/meta-fields.ts#L46-L48 type IndexField struct { Enabled bool `json:"enabled"` } @@ -52,7 +52,7 @@ func (s *IndexField) UnmarshalJSON(data []byte) error { switch t { case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indexhealthstats.go b/typedapi/types/indexhealthstats.go index 151c1ce194..04c14dd0a8 100644 --- a/typedapi/types/indexhealthstats.go +++ b/typedapi/types/indexhealthstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IndexHealthStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/health/types.ts#L24-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/health/types.ts#L24-L34 type IndexHealthStats struct { ActivePrimaryShards int `json:"active_primary_shards"` ActiveShards int `json:"active_shards"` @@ -63,7 +63,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case "active_primary_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -79,7 +79,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case "active_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case "initializing_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,7 +111,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case "number_of_replicas": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -127,7 +127,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case "number_of_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -143,7 +143,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case "relocating_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -172,7 +172,7 @@ func (s *IndexHealthStats) UnmarshalJSON(data []byte) error { case "unassigned_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indexingpressurememorysummary.go b/typedapi/types/indexingpressurememorysummary.go index ef589f4143..9306e1ccc2 100644 --- a/typedapi/types/indexingpressurememorysummary.go +++ b/typedapi/types/indexingpressurememorysummary.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndexingPressureMemorySummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L580-L589 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L580-L589 type IndexingPressureMemorySummary struct { AllInBytes int64 `json:"all_in_bytes"` CombinedCoordinatingAndPrimaryInBytes int64 `json:"combined_coordinating_and_primary_in_bytes"` @@ -59,7 +59,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { switch t { case "all_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -74,7 +74,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { } case "combined_coordinating_and_primary_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { } case "coordinating_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { } case "coordinating_rejections": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -119,7 +119,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { } case "primary_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -134,7 +134,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { } case "primary_rejections": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -149,7 +149,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { } case "replica_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -164,7 +164,7 @@ func (s *IndexingPressureMemorySummary) UnmarshalJSON(data []byte) error { } case "replica_rejections": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indexingslowlogsettings.go b/typedapi/types/indexingslowlogsettings.go index 46f806b08b..dfd9c63022 100644 --- a/typedapi/types/indexingslowlogsettings.go +++ b/typedapi/types/indexingslowlogsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndexingSlowlogSettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L554-L559 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L563-L568 type IndexingSlowlogSettings struct { Level *string `json:"level,omitempty"` Reformat *bool `json:"reformat,omitempty"` @@ -67,7 +67,7 @@ func (s *IndexingSlowlogSettings) UnmarshalJSON(data []byte) error { s.Level = &o case "reformat": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *IndexingSlowlogSettings) UnmarshalJSON(data []byte) error { case "source": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indexingslowlogtresholds.go b/typedapi/types/indexingslowlogtresholds.go index f0c6f5e883..d396cde18b 100644 --- a/typedapi/types/indexingslowlogtresholds.go +++ b/typedapi/types/indexingslowlogtresholds.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndexingSlowlogTresholds type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L561-L568 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L570-L577 type IndexingSlowlogTresholds struct { // Index The indexing slow log, similar in functionality to the search slow log. The // log file name ends with `_index_indexing_slowlog.json`. diff --git a/typedapi/types/indexingstats.go b/typedapi/types/indexingstats.go index 530e6ab698..c5284158d6 100644 --- a/typedapi/types/indexingstats.go +++ b/typedapi/types/indexingstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndexingStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L143-L159 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L143-L159 type IndexingStats struct { DeleteCurrent int64 `json:"delete_current"` DeleteTime Duration `json:"delete_time,omitempty"` @@ -66,7 +66,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { switch t { case "delete_current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { } case "delete_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,7 +106,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { } case "index_current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -121,7 +121,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { } case "index_failed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -146,7 +146,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { } case "index_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -161,7 +161,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { } case "is_throttled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -175,7 +175,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { } case "noop_update_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -208,7 +208,7 @@ func (s *IndexingStats) UnmarshalJSON(data []byte) error { } case "write_load": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indexmappingrecord.go b/typedapi/types/indexmappingrecord.go index 1b6a9fe4fe..729074618c 100644 --- a/typedapi/types/indexmappingrecord.go +++ b/typedapi/types/indexmappingrecord.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndexMappingRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_mapping/IndicesGetMappingResponse.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_mapping/IndicesGetMappingResponse.ts#L29-L32 type IndexMappingRecord struct { Item *TypeMapping `json:"item,omitempty"` Mappings TypeMapping `json:"mappings"` diff --git a/typedapi/types/indexoperation.go b/typedapi/types/indexoperation.go index 5423e0a16b..597035f5d6 100644 --- a/typedapi/types/indexoperation.go +++ b/typedapi/types/indexoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IndexOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/bulk/types.ts#L132-L132 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/bulk/types.ts#L132-L132 type IndexOperation struct { // DynamicTemplates A map from the full name of fields to the name of dynamic templates. // Defaults to an empty map. @@ -91,7 +91,7 @@ func (s *IndexOperation) UnmarshalJSON(data []byte) error { } case "if_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -128,7 +128,7 @@ func (s *IndexOperation) UnmarshalJSON(data []byte) error { s.Pipeline = &o case "require_alias": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indexprivilegescheck.go b/typedapi/types/indexprivilegescheck.go index 0edf1d6f23..b993f19150 100644 --- a/typedapi/types/indexprivilegescheck.go +++ b/typedapi/types/indexprivilegescheck.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IndexPrivilegesCheck type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/has_privileges/types.ts#L33-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/has_privileges/types.ts#L33-L44 type IndexPrivilegesCheck struct { // AllowRestrictedIndices This needs to be set to true (default is false) if using wildcards or regexps // for patterns that cover restricted indices. @@ -66,7 +66,7 @@ func (s *IndexPrivilegesCheck) UnmarshalJSON(data []byte) error { switch t { case "allow_restricted_indices": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indexresult.go b/typedapi/types/indexresult.go index 4d90ef2c98..0b1e4f697f 100644 --- a/typedapi/types/indexresult.go +++ b/typedapi/types/indexresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndexResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L267-L269 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L267-L269 type IndexResult struct { Response IndexResultSummary `json:"response"` } diff --git a/typedapi/types/indexresultsummary.go b/typedapi/types/indexresultsummary.go index dfadf7a96e..cac33a86da 100644 --- a/typedapi/types/indexresultsummary.go +++ b/typedapi/types/indexresultsummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IndexResultSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L271-L277 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L271-L277 type IndexResultSummary struct { Created bool `json:"created"` Id string `json:"id"` @@ -58,7 +58,7 @@ func (s *IndexResultSummary) UnmarshalJSON(data []byte) error { switch t { case "created": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indexrouting.go b/typedapi/types/indexrouting.go index 7d3be07b0a..0bd0153849 100644 --- a/typedapi/types/indexrouting.go +++ b/typedapi/types/indexrouting.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndexRouting type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexRouting.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexRouting.ts#L22-L25 type IndexRouting struct { Allocation *IndexRoutingAllocation `json:"allocation,omitempty"` Rebalance *IndexRoutingRebalance `json:"rebalance,omitempty"` diff --git a/typedapi/types/indexroutingallocation.go b/typedapi/types/indexroutingallocation.go index b353e3021b..5841d28782 100644 --- a/typedapi/types/indexroutingallocation.go +++ b/typedapi/types/indexroutingallocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // IndexRoutingAllocation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexRouting.ts#L27-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexRouting.ts#L27-L32 type IndexRoutingAllocation struct { Disk *IndexRoutingAllocationDisk `json:"disk,omitempty"` Enable *indexroutingallocationoptions.IndexRoutingAllocationOptions `json:"enable,omitempty"` diff --git a/typedapi/types/indexroutingallocationdisk.go b/typedapi/types/indexroutingallocationdisk.go index c8a1dbc1c7..7ea588aece 100644 --- a/typedapi/types/indexroutingallocationdisk.go +++ b/typedapi/types/indexroutingallocationdisk.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndexRoutingAllocationDisk type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexRouting.ts#L62-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexRouting.ts#L62-L64 type IndexRoutingAllocationDisk struct { ThresholdEnabled string `json:"threshold_enabled,omitempty"` } diff --git a/typedapi/types/indexroutingallocationinclude.go b/typedapi/types/indexroutingallocationinclude.go index ade17425bc..36dd914f58 100644 --- a/typedapi/types/indexroutingallocationinclude.go +++ b/typedapi/types/indexroutingallocationinclude.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndexRoutingAllocationInclude type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexRouting.ts#L52-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexRouting.ts#L52-L55 type IndexRoutingAllocationInclude struct { Id_ *string `json:"_id,omitempty"` TierPreference_ *string `json:"_tier_preference,omitempty"` diff --git a/typedapi/types/indexroutingallocationinitialrecovery.go b/typedapi/types/indexroutingallocationinitialrecovery.go index e769e75120..6f377254e0 100644 --- a/typedapi/types/indexroutingallocationinitialrecovery.go +++ b/typedapi/types/indexroutingallocationinitialrecovery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // IndexRoutingAllocationInitialRecovery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexRouting.ts#L57-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexRouting.ts#L57-L59 type IndexRoutingAllocationInitialRecovery struct { Id_ *string `json:"_id,omitempty"` } diff --git a/typedapi/types/indexroutingrebalance.go b/typedapi/types/indexroutingrebalance.go index 3a8bbbfdf1..9675c95d30 100644 --- a/typedapi/types/indexroutingrebalance.go +++ b/typedapi/types/indexroutingrebalance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // IndexRoutingRebalance type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexRouting.ts#L34-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexRouting.ts#L34-L36 type IndexRoutingRebalance struct { Enable indexroutingrebalanceoptions.IndexRoutingRebalanceOptions `json:"enable"` } diff --git a/typedapi/types/indexsegment.go b/typedapi/types/indexsegment.go index e9c8dbdaaa..f874c61d06 100644 --- a/typedapi/types/indexsegment.go +++ b/typedapi/types/indexsegment.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndexSegment type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/segments/types.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/segments/types.ts#L24-L26 type IndexSegment struct { Shards map[string][]ShardsSegment `json:"shards"` } diff --git a/typedapi/types/indexsegmentsort.go b/typedapi/types/indexsegmentsort.go index 946d120914..452ea9e94c 100644 --- a/typedapi/types/indexsegmentsort.go +++ b/typedapi/types/indexsegmentsort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // IndexSegmentSort type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSegmentSort.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSegmentSort.ts#L22-L27 type IndexSegmentSort struct { Field []string `json:"field,omitempty"` Missing []segmentsortmissing.SegmentSortMissing `json:"missing,omitempty"` diff --git a/typedapi/types/indexsettingblocks.go b/typedapi/types/indexsettingblocks.go index efbc8b5512..09f0c348a8 100644 --- a/typedapi/types/indexsettingblocks.go +++ b/typedapi/types/indexsettingblocks.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // IndexSettingBlocks type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L252-L258 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L254-L260 type IndexSettingBlocks struct { Metadata Stringifiedboolean `json:"metadata,omitempty"` Read Stringifiedboolean `json:"read,omitempty"` diff --git a/typedapi/types/indexsettings.go b/typedapi/types/indexsettings.go index b1d9125dfe..4512a12cdd 100644 --- a/typedapi/types/indexsettings.go +++ b/typedapi/types/indexsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IndexSettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L69-L167 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L69-L169 type IndexSettings struct { Analysis *IndexSettingsAnalysis `json:"analysis,omitempty"` // Analyze Settings to define analyzers, tokenizers, token filters and character @@ -236,7 +236,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { } case "load_fixed_bitset_filters_eagerly": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -256,7 +256,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "max_docvalue_fields_search": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -272,7 +272,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "max_inner_result_window": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -288,7 +288,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "max_ngram_diff": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -304,7 +304,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "max_refresh_listeners": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -320,7 +320,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "max_regex_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -336,7 +336,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "max_rescore_window": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -352,7 +352,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "max_result_window": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -368,7 +368,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "max_script_fields": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -384,7 +384,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "max_shingle_diff": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -400,7 +400,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "max_slices_per_scroll": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -416,7 +416,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "max_terms_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -461,7 +461,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "number_of_routing_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -562,7 +562,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -648,7 +648,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { case "top_metrics_max_size": - var tmp interface{} + var tmp 
any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -711,7 +711,7 @@ func (s *IndexSettings) UnmarshalJSON(data []byte) error { func (s IndexSettings) MarshalJSON() ([]byte, error) { type opt IndexSettings // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/indexsettingsanalysis.go b/typedapi/types/indexsettingsanalysis.go index 654ded4f5c..1df2e486b9 100644 --- a/typedapi/types/indexsettingsanalysis.go +++ b/typedapi/types/indexsettingsanalysis.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -29,7 +29,7 @@ import ( // IndexSettingsAnalysis type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L317-L323 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L319-L325 type IndexSettingsAnalysis struct { Analyzer map[string]Analyzer `json:"analyzer,omitempty"` CharFilter map[string]CharFilter `json:"char_filter,omitempty"` @@ -60,7 +60,7 @@ func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -169,7 +169,7 @@ func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -222,7 +222,7 @@ func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -527,12 +527,14 @@ func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) buf.Seek(0, io.SeekStart) - + if _, ok := kind["type"]; !ok { + kind["type"] = "custom" + } switch kind["type"] { case "lowercase": oo := NewLowercaseNormalizer() @@ -562,7 +564,7 @@ func (s *IndexSettingsAnalysis) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) diff --git a/typedapi/types/indexsettingslifecycle.go b/typedapi/types/indexsettingslifecycle.go index e51a1d97a5..a7b5123430 100644 --- 
a/typedapi/types/indexsettingslifecycle.go +++ b/typedapi/types/indexsettingslifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndexSettingsLifecycle type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L274-L307 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L276-L309 type IndexSettingsLifecycle struct { // IndexingComplete Indicates whether or not the index has been rolled over. Automatically set to // true when ILM completes the rollover action. @@ -39,7 +39,7 @@ type IndexSettingsLifecycle struct { IndexingComplete Stringifiedboolean `json:"indexing_complete,omitempty"` // Name The name of the policy to use to manage the index. For information about how // Elasticsearch applies policy changes, see Policy updates. - Name string `json:"name"` + Name *string `json:"name,omitempty"` // OriginationDate If specified, this is the timestamp used to calculate the index age for its // phase transitions. Use this setting // if you create a new index that contains old data and want to use the original @@ -90,7 +90,7 @@ func (s *IndexSettingsLifecycle) UnmarshalJSON(data []byte) error { } case "origination_date": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -105,7 +105,7 @@ func (s *IndexSettingsLifecycle) UnmarshalJSON(data []byte) error { } case "parse_origination_date": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indexsettingslifecyclestep.go b/typedapi/types/indexsettingslifecyclestep.go index 0315d390ab..757b98b919 100644 --- a/typedapi/types/indexsettingslifecyclestep.go +++ b/typedapi/types/indexsettingslifecyclestep.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // IndexSettingsLifecycleStep type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L309-L315 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L311-L317 type IndexSettingsLifecycleStep struct { // WaitTimeThreshold Time to wait for the cluster to resolve allocation issues during an ILM // shrink action. Must be greater than 1h (1 hour). diff --git a/typedapi/types/indexsettingstimeseries.go b/typedapi/types/indexsettingstimeseries.go index af7b841836..0a3136c31d 100644 --- a/typedapi/types/indexsettingstimeseries.go +++ b/typedapi/types/indexsettingstimeseries.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // IndexSettingsTimeSeries type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L325-L328 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L327-L330 type IndexSettingsTimeSeries struct { EndTime DateTime `json:"end_time,omitempty"` StartTime DateTime `json:"start_time,omitempty"` diff --git a/typedapi/types/indexstate.go b/typedapi/types/indexstate.go index 7d661559d8..f9182f6ae6 100644 --- a/typedapi/types/indexstate.go +++ b/typedapi/types/indexstate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // IndexState type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexState.ts#L27-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexState.ts#L27-L40 type IndexState struct { Aliases map[string]Alias `json:"aliases,omitempty"` DataStream *string `json:"data_stream,omitempty"` diff --git a/typedapi/types/indexstats.go b/typedapi/types/indexstats.go index 85a921df0a..ac762abfe4 100644 --- a/typedapi/types/indexstats.go +++ b/typedapi/types/indexstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndexStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L52-L93 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L52-L93 type IndexStats struct { Bulk *BulkStats `json:"bulk,omitempty"` // Completion Contains statistics about completions across all shards assigned to the node. diff --git a/typedapi/types/indextemplate.go b/typedapi/types/indextemplate.go index 35b61fa1e3..79a833c123 100644 --- a/typedapi/types/indextemplate.go +++ b/typedapi/types/indextemplate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndexTemplate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexTemplate.ts#L31-L70 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexTemplate.ts#L31-L70 type IndexTemplate struct { AllowAutoCreate *bool `json:"allow_auto_create,omitempty"` // ComposedOf An ordered list of component template names. @@ -80,7 +80,7 @@ func (s *IndexTemplate) UnmarshalJSON(data []byte) error { switch t { case "allow_auto_create": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *IndexTemplate) UnmarshalJSON(data []byte) error { } case "priority": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indextemplatedatastreamconfiguration.go b/typedapi/types/indextemplatedatastreamconfiguration.go index a421d50ead..a121ad7367 100644 --- a/typedapi/types/indextemplatedatastreamconfiguration.go +++ b/typedapi/types/indextemplatedatastreamconfiguration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndexTemplateDataStreamConfiguration type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexTemplate.ts#L72-L83 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexTemplate.ts#L72-L83 type IndexTemplateDataStreamConfiguration struct { // AllowCustomRouting If true, the data stream supports custom routing. AllowCustomRouting *bool `json:"allow_custom_routing,omitempty"` @@ -55,7 +55,7 @@ func (s *IndexTemplateDataStreamConfiguration) UnmarshalJSON(data []byte) error switch t { case "allow_custom_routing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -69,7 +69,7 @@ func (s *IndexTemplateDataStreamConfiguration) UnmarshalJSON(data []byte) error } case "hidden": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indextemplateitem.go b/typedapi/types/indextemplateitem.go index 6cf4856b05..2c0addb8ad 100644 --- a/typedapi/types/indextemplateitem.go +++ b/typedapi/types/indextemplateitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // IndexTemplateItem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_index_template/IndicesGetIndexTemplateResponse.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_index_template/IndicesGetIndexTemplateResponse.ts#L29-L32 type IndexTemplateItem struct { IndexTemplate IndexTemplate `json:"index_template"` Name string `json:"name"` diff --git a/typedapi/types/indextemplatemapping.go b/typedapi/types/indextemplatemapping.go index 484ce7d2dd..9640567e54 100644 --- a/typedapi/types/indextemplatemapping.go +++ b/typedapi/types/indextemplatemapping.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndexTemplateMapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/put_index_template/IndicesPutIndexTemplateRequest.ts#L97-L119 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/put_index_template/IndicesPutIndexTemplateRequest.ts#L121-L143 type IndexTemplateMapping struct { // Aliases Aliases to add. // If the index template includes a `data_stream` object, these are data stream diff --git a/typedapi/types/indextemplatesummary.go b/typedapi/types/indextemplatesummary.go index ee138618cd..490b85adf5 100644 --- a/typedapi/types/indextemplatesummary.go +++ b/typedapi/types/indextemplatesummary.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndexTemplateSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexTemplate.ts#L85-L107 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexTemplate.ts#L85-L107 type IndexTemplateSummary struct { // Aliases Aliases to add. // If the index template includes a `data_stream` object, these are data stream diff --git a/typedapi/types/indexversioning.go b/typedapi/types/indexversioning.go index aecfb803c8..9ab2b36cad 100644 --- a/typedapi/types/indexversioning.go +++ b/typedapi/types/indexversioning.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndexVersioning type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L269-L272 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L271-L274 type IndexVersioning struct { Created *string `json:"created,omitempty"` CreatedString *string `json:"created_string,omitempty"` diff --git a/typedapi/types/indicatornode.go b/typedapi/types/indicatornode.go index 8b14997d7d..deddd88692 100644 --- a/typedapi/types/indicatornode.go +++ b/typedapi/types/indicatornode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,10 +31,10 @@ import ( // IndicatorNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L90-L93 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L90-L93 type IndicatorNode struct { - Name string `json:"name,omitempty"` - NodeId string `json:"node_id,omitempty"` + Name *string `json:"name,omitempty"` + NodeId *string `json:"node_id,omitempty"` } func (s *IndicatorNode) UnmarshalJSON(data []byte) error { @@ -62,7 +62,7 @@ func (s *IndicatorNode) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Name = o + s.Name = &o case "node_id": var tmp json.RawMessage @@ -74,7 +74,7 @@ func (s *IndicatorNode) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.NodeId = o + s.NodeId = &o } } diff --git a/typedapi/types/indicators.go b/typedapi/types/indicators.go index a94793758b..4d186a77fa 100644 --- a/typedapi/types/indicators.go +++ b/typedapi/types/indicators.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Indicators type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L32-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L32-L40 type Indicators struct { Disk *DiskIndicator `json:"disk,omitempty"` Ilm *IlmIndicator `json:"ilm,omitempty"` diff --git a/typedapi/types/indices.go b/typedapi/types/indices.go index 73dcfa9044..c7150d7ee7 100644 --- a/typedapi/types/indices.go +++ b/typedapi/types/indices.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Indices type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L67-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L67-L67 type Indices []string diff --git a/typedapi/types/indicesaction.go b/typedapi/types/indicesaction.go index 9ef428d005..13df26067c 100644 --- a/typedapi/types/indicesaction.go +++ b/typedapi/types/indicesaction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndicesAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/update_aliases/types.ts#L23-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/update_aliases/types.ts#L23-L39 type IndicesAction struct { // Add Adds a data stream or index to an alias. // If the alias doesn’t exist, the `add` action creates it. diff --git a/typedapi/types/indicesblockstatus.go b/typedapi/types/indicesblockstatus.go index f7803e5b45..1220494f25 100644 --- a/typedapi/types/indicesblockstatus.go +++ b/typedapi/types/indicesblockstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndicesBlockStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/add_block/IndicesAddBlockResponse.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/add_block/IndicesAddBlockResponse.ts#L30-L33 type IndicesBlockStatus struct { Blocked bool `json:"blocked"` Name string `json:"name"` @@ -53,7 +53,7 @@ func (s *IndicesBlockStatus) UnmarshalJSON(data []byte) error { switch t { case "blocked": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indicesindexingpressure.go b/typedapi/types/indicesindexingpressure.go index 73e5f0bca9..f86bdcf677 100644 --- a/typedapi/types/indicesindexingpressure.go +++ b/typedapi/types/indicesindexingpressure.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndicesIndexingPressure type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L541-L543 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L550-L552 type IndicesIndexingPressure struct { Memory IndicesIndexingPressureMemory `json:"memory"` } diff --git a/typedapi/types/indicesindexingpressurememory.go b/typedapi/types/indicesindexingpressurememory.go index 8733e1abc5..99fd1dbae4 100644 --- a/typedapi/types/indicesindexingpressurememory.go +++ b/typedapi/types/indicesindexingpressurememory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndicesIndexingPressureMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L545-L552 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L554-L561 type IndicesIndexingPressureMemory struct { // Limit Number of outstanding bytes that may be consumed by indexing requests. When // this limit is reached or exceeded, @@ -58,7 +58,7 @@ func (s *IndicesIndexingPressureMemory) UnmarshalJSON(data []byte) error { case "limit": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indicesmodifyaction.go b/typedapi/types/indicesmodifyaction.go index 5a42c752ef..9aa1bd09b4 100644 --- a/typedapi/types/indicesmodifyaction.go +++ b/typedapi/types/indicesmodifyaction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndicesModifyAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/modify_data_stream/types.ts#L22-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/modify_data_stream/types.ts#L22-L37 type IndicesModifyAction struct { // AddBackingIndex Adds an existing index as a backing index for a data stream. // The index is hidden as part of this operation. diff --git a/typedapi/types/indicesoptions.go b/typedapi/types/indicesoptions.go index ba1f9ceeaf..312655c31e 100644 --- a/typedapi/types/indicesoptions.go +++ b/typedapi/types/indicesoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IndicesOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L332-L359 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L335-L362 type IndicesOptions struct { // AllowNoIndices If false, the request returns an error if any wildcard expression, index // alias, or `_all` value targets only @@ -70,7 +70,7 @@ func (s *IndicesOptions) UnmarshalJSON(data []byte) error { switch t { case "allow_no_indices": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *IndicesOptions) UnmarshalJSON(data []byte) error { } case "ignore_throttled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *IndicesOptions) UnmarshalJSON(data []byte) error { } case "ignore_unavailable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indicesprivileges.go b/typedapi/types/indicesprivileges.go index 3dea391ff3..2c065bc53b 100644 --- a/typedapi/types/indicesprivileges.go +++ b/typedapi/types/indicesprivileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IndicesPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L82-L105 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L200-L224 type IndicesPrivileges struct { // AllowRestrictedIndices Set to `true` if using wildcard or regular expressions for patterns that // cover restricted indices. Implicitly, restricted indices have limited @@ -72,7 +72,7 @@ func (s *IndicesPrivileges) UnmarshalJSON(data []byte) error { switch t { case "allow_restricted_indices": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indicesprivilegesquery.go b/typedapi/types/indicesprivilegesquery.go index 1d0a1d580f..d5d6bea084 100644 --- a/typedapi/types/indicesprivilegesquery.go +++ b/typedapi/types/indicesprivilegesquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ package types // Query // RoleTemplateQuery // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L131-L139 -type IndicesPrivilegesQuery interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L250-L258 +type IndicesPrivilegesQuery any diff --git a/typedapi/types/indicesrecord.go b/typedapi/types/indicesrecord.go index 416f6167b6..840128da9c 100644 --- a/typedapi/types/indicesrecord.go +++ b/typedapi/types/indicesrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndicesRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/indices/types.ts#L20-L801 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/indices/types.ts#L20-L801 type IndicesRecord struct { // BulkAvgSizeInBytes average size in bytes of shard bulk BulkAvgSizeInBytes *string `json:"bulk.avg_size_in_bytes,omitempty"` @@ -50,9 +50,9 @@ type IndicesRecord struct { // CreationDateString index creation date (as string) CreationDateString *string `json:"creation.date.string,omitempty"` // DocsCount available docs - DocsCount string `json:"docs.count,omitempty"` + DocsCount *string `json:"docs.count,omitempty"` // DocsDeleted deleted docs - DocsDeleted string `json:"docs.deleted,omitempty"` + DocsDeleted *string `json:"docs.deleted,omitempty"` // FielddataEvictions fielddata evictions FielddataEvictions *string `json:"fielddata.evictions,omitempty"` // FielddataMemorySize used fielddata cache @@ -229,7 +229,7 @@ type IndicesRecord struct { // PriSegmentsVersionMapMemory memory used by version map PriSegmentsVersionMapMemory *string `json:"pri.segments.version_map_memory,omitempty"` // PriStoreSize store size of primaries - PriStoreSize string `json:"pri.store.size,omitempty"` + PriStoreSize *string `json:"pri.store.size,omitempty"` // PriSuggestCurrent number of current suggest ops PriSuggestCurrent *string `json:"pri.suggest.current,omitempty"` // PriSuggestTime time spend in suggest @@ -302,7 +302,7 @@ type IndicesRecord struct { // Status open/close status Status *string `json:"status,omitempty"` // StoreSize store size of primaries & replicas - StoreSize string `json:"store.size,omitempty"` + StoreSize *string `json:"store.size,omitempty"` // SuggestCurrent number of current suggest ops SuggestCurrent *string `json:"suggest.current,omitempty"` // SuggestTime time spend in suggest @@ -440,7 +440,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.DocsCount = o + s.DocsCount = &o case "docs.deleted", "dd", "docsDeleted": var tmp json.RawMessage @@ -452,7 +452,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.DocsDeleted = o 
+ s.DocsDeleted = &o case "fielddata.evictions", "fe", "fielddataEvictions": var tmp json.RawMessage @@ -1508,7 +1508,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.PriStoreSize = o + s.PriStoreSize = &o case "pri.suggest.current": var tmp json.RawMessage @@ -1940,7 +1940,7 @@ func (s *IndicesRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.StoreSize = o + s.StoreSize = &o case "suggest.current", "suc", "suggestCurrent": var tmp json.RawMessage diff --git a/typedapi/types/indicesshardsstats.go b/typedapi/types/indicesshardsstats.go index 5c575602ae..4ab3e2dbfc 100644 --- a/typedapi/types/indicesshardsstats.go +++ b/typedapi/types/indicesshardsstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndicesShardsStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L49-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L52-L55 type IndicesShardsStats struct { AllFields FieldSummary `json:"all_fields"` Fields map[string]FieldSummary `json:"fields"` diff --git a/typedapi/types/indicesshardstats.go b/typedapi/types/indicesshardstats.go index d60c5c182f..5b6b81862d 100644 --- a/typedapi/types/indicesshardstats.go +++ b/typedapi/types/indicesshardstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // IndicesShardStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L192-L223 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L192-L223 type IndicesShardStats struct { Bulk *BulkStats `json:"bulk,omitempty"` Commit *ShardCommit `json:"commit,omitempty"` diff --git a/typedapi/types/indicesshardstores.go b/typedapi/types/indicesshardstores.go index bad16c3e27..3c18a8aafe 100644 --- a/typedapi/types/indicesshardstores.go +++ b/typedapi/types/indicesshardstores.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IndicesShardStores type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/shard_stores/types.ts#L26-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/shard_stores/types.ts#L26-L28 type IndicesShardStores struct { Shards map[string]ShardStoreWrapper `json:"shards"` } diff --git a/typedapi/types/indicesstats.go b/typedapi/types/indicesstats.go index 61c58f4e37..e6caf575ed 100644 --- a/typedapi/types/indicesstats.go +++ b/typedapi/types/indicesstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IndicesStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L95-L110 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L95-L110 type IndicesStats struct { Health *healthstatus.HealthStatus `json:"health,omitempty"` Primaries *IndexStats `json:"primaries,omitempty"` diff --git a/typedapi/types/indicesvalidationexplanation.go b/typedapi/types/indicesvalidationexplanation.go index 8068e41209..d30a02fcc9 100644 --- a/typedapi/types/indicesvalidationexplanation.go +++ b/typedapi/types/indicesvalidationexplanation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndicesValidationExplanation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/validate_query/IndicesValidateQueryResponse.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/validate_query/IndicesValidateQueryResponse.ts#L32-L37 type IndicesValidationExplanation struct { Error *string `json:"error,omitempty"` Explanation *string `json:"explanation,omitempty"` @@ -84,7 +84,7 @@ func (s *IndicesValidationExplanation) UnmarshalJSON(data []byte) error { } case "valid": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/indicesversions.go b/typedapi/types/indicesversions.go index 7eb2118022..df6da7fbe1 100644 --- a/typedapi/types/indicesversions.go +++ b/typedapi/types/indicesversions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IndicesVersions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L263-L268 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L263-L268 type IndicesVersions struct { IndexCount int `json:"index_count"` PrimaryShardCount int `json:"primary_shard_count"` @@ -56,7 +56,7 @@ func (s *IndicesVersions) UnmarshalJSON(data []byte) error { case "index_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *IndicesVersions) UnmarshalJSON(data []byte) error { case "primary_shard_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *IndicesVersions) UnmarshalJSON(data []byte) error { } case "total_primary_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/inferenceaggregate.go b/typedapi/types/inferenceaggregate.go index 721db77a49..381bde3be6 100644 --- a/typedapi/types/inferenceaggregate.go +++ b/typedapi/types/inferenceaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InferenceAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L659-L670 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L663-L677 type InferenceAggregate struct { Data map[string]json.RawMessage `json:"-"` FeatureImportance []InferenceFeatureImportance `json:"feature_importance,omitempty"` @@ -110,7 +110,7 @@ func (s *InferenceAggregate) UnmarshalJSON(data []byte) error { func (s InferenceAggregate) MarshalJSON() ([]byte, error) { type opt InferenceAggregate // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/inferenceaggregation.go b/typedapi/types/inferenceaggregation.go index 37309de8d9..c0856a387e 100644 --- a/typedapi/types/inferenceaggregation.go +++ b/typedapi/types/inferenceaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // InferenceAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L205-L214 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L205-L214 type InferenceAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -45,10 +45,8 @@ type InferenceAggregation struct { GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` // InferenceConfig Contains the inference type and its options. InferenceConfig *InferenceConfigContainer `json:"inference_config,omitempty"` - Meta Metadata `json:"meta,omitempty"` // ModelId The ID or alias for the trained model. - ModelId string `json:"model_id"` - Name *string `json:"name,omitempty"` + ModelId string `json:"model_id"` } func (s *InferenceAggregation) UnmarshalJSON(data []byte) error { @@ -93,28 +91,11 @@ func (s *InferenceAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "InferenceConfig", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "model_id": if err := dec.Decode(&s.ModelId); err != nil { return fmt.Errorf("%s | %w", "ModelId", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/inferenceclassimportance.go b/typedapi/types/inferenceclassimportance.go index 2970d45cbd..6337cd32a5 100644 --- a/typedapi/types/inferenceclassimportance.go +++ b/typedapi/types/inferenceclassimportance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InferenceClassImportance type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L684-L687 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L691-L694 type InferenceClassImportance struct { ClassName string `json:"class_name"` Importance Float64 `json:"importance"` @@ -65,7 +65,7 @@ func (s *InferenceClassImportance) UnmarshalJSON(data []byte) error { s.ClassName = o case "importance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/inferenceconfig.go b/typedapi/types/inferenceconfig.go index ca784725f3..e856c17ea7 100644 --- a/typedapi/types/inferenceconfig.go +++ b/typedapi/types/inferenceconfig.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // InferenceConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L746-L758 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L746-L758 type InferenceConfig struct { // Classification Classification configuration for inference. 
Classification *InferenceConfigClassification `json:"classification,omitempty"` diff --git a/typedapi/types/inferenceconfigclassification.go b/typedapi/types/inferenceconfigclassification.go index 555f5fa943..63e93f3cde 100644 --- a/typedapi/types/inferenceconfigclassification.go +++ b/typedapi/types/inferenceconfigclassification.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InferenceConfigClassification type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L773-L799 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L773-L799 type InferenceConfigClassification struct { // NumTopClasses Specifies the number of top class predictions to return. NumTopClasses *int `json:"num_top_classes,omitempty"` @@ -64,7 +64,7 @@ func (s *InferenceConfigClassification) UnmarshalJSON(data []byte) error { case "num_top_classes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *InferenceConfigClassification) UnmarshalJSON(data []byte) error { case "num_top_feature_importance_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/inferenceconfigcontainer.go b/typedapi/types/inferenceconfigcontainer.go index 4ba0e1a05d..d70d397f34 100644 --- a/typedapi/types/inferenceconfigcontainer.go +++ b/typedapi/types/inferenceconfigcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // InferenceConfigContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L216-L222 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L216-L222 type InferenceConfigContainer struct { // Classification Classification configuration for inference. Classification *ClassificationInferenceOptions `json:"classification,omitempty"` diff --git a/typedapi/types/inferenceconfigcreatecontainer.go b/typedapi/types/inferenceconfigcreatecontainer.go index 3854a9df98..086b559a54 100644 --- a/typedapi/types/inferenceconfigcreatecontainer.go +++ b/typedapi/types/inferenceconfigcreatecontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // InferenceConfigCreateContainer type. 
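The InferenceAggregation hunk above removes the Meta and Name fields and their JSON cases, so the pipeline aggregation itself now carries only buckets_path, gap_policy, inference_config and model_id. Below is a hedged construction sketch; it assumes BucketsPath is the usual union type that accepts a plain string, and the model id and path values are made up.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	agg := types.InferenceAggregation{
		ModelId:     "my-trained-model",          // illustrative model id
		BucketsPath: "sales_per_month>avg_price", // single-path form of the union
	}

	body, err := json.Marshal(agg)
	if err != nil {
		panic(err)
	}
	// The serialized aggregation no longer carries "meta" or "name" keys.
	fmt.Println(string(body))
}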
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L23-L80 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L23-L80 type InferenceConfigCreateContainer struct { // Classification Classification configuration for inference. Classification *ClassificationInferenceOptions `json:"classification,omitempty"` diff --git a/typedapi/types/inferenceconfigregression.go b/typedapi/types/inferenceconfigregression.go index 01106e8f64..160ca8a847 100644 --- a/typedapi/types/inferenceconfigregression.go +++ b/typedapi/types/inferenceconfigregression.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InferenceConfigRegression type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L760-L771 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L760-L771 type InferenceConfigRegression struct { // NumTopFeatureImportanceValues Specifies the maximum number of feature importance values per document. NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` @@ -57,7 +57,7 @@ func (s *InferenceConfigRegression) UnmarshalJSON(data []byte) error { case "num_top_feature_importance_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/inferenceconfigupdatecontainer.go b/typedapi/types/inferenceconfigupdatecontainer.go index dc865bb256..193d4fd2c9 100644 --- a/typedapi/types/inferenceconfigupdatecontainer.go +++ b/typedapi/types/inferenceconfigupdatecontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // InferenceConfigUpdateContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L296-L318 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L296-L318 type InferenceConfigUpdateContainer struct { // Classification Classification configuration for inference. Classification *ClassificationInferenceOptions `json:"classification,omitempty"` diff --git a/typedapi/types/modelconfig.go b/typedapi/types/inferenceendpoint.go similarity index 78% rename from typedapi/types/modelconfig.go rename to typedapi/types/inferenceendpoint.go index c1d803dcf3..21e46fffe6 100644 --- a/typedapi/types/modelconfig.go +++ b/typedapi/types/inferenceendpoint.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -29,19 +29,19 @@ import ( "strconv" ) -// ModelConfig type. +// InferenceEndpoint type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/_types/Services.ts#L23-L39 -type ModelConfig struct { +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/_types/Services.ts#L23-L39 +type InferenceEndpoint struct { // Service The service type Service string `json:"service"` // ServiceSettings Settings specific to the service ServiceSettings json.RawMessage `json:"service_settings"` - // TaskSettings Task settings specific to the service and model + // TaskSettings Task settings specific to the service and task type TaskSettings json.RawMessage `json:"task_settings"` } -func (s *ModelConfig) UnmarshalJSON(data []byte) error { +func (s *InferenceEndpoint) UnmarshalJSON(data []byte) error { dec := json.NewDecoder(bytes.NewReader(data)) @@ -83,9 +83,9 @@ func (s *ModelConfig) UnmarshalJSON(data []byte) error { return nil } -// NewModelConfig returns a ModelConfig. -func NewModelConfig() *ModelConfig { - r := &ModelConfig{} +// NewInferenceEndpoint returns a InferenceEndpoint. +func NewInferenceEndpoint() *InferenceEndpoint { + r := &InferenceEndpoint{} return r } diff --git a/typedapi/types/modelconfigcontainer.go b/typedapi/types/inferenceendpointinfo.go similarity index 75% rename from typedapi/types/modelconfigcontainer.go rename to typedapi/types/inferenceendpointinfo.go index bbab956a92..28b24f6776 100644 --- a/typedapi/types/modelconfigcontainer.go +++ b/typedapi/types/inferenceendpointinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,23 +31,23 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/tasktype" ) -// ModelConfigContainer type. +// InferenceEndpointInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/_types/Services.ts#L41-L53 -type ModelConfigContainer struct { - // ModelId The model Id - ModelId string `json:"model_id"` +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/_types/Services.ts#L41-L53 +type InferenceEndpointInfo struct { + // InferenceId The inference Id + InferenceId string `json:"inference_id"` // Service The service type Service string `json:"service"` // ServiceSettings Settings specific to the service ServiceSettings json.RawMessage `json:"service_settings"` - // TaskSettings Task settings specific to the service and model + // TaskSettings Task settings specific to the service and task type TaskSettings json.RawMessage `json:"task_settings"` - // TaskType The model's task type + // TaskType The task type TaskType tasktype.TaskType `json:"task_type"` } -func (s *ModelConfigContainer) UnmarshalJSON(data []byte) error { +func (s *InferenceEndpointInfo) UnmarshalJSON(data []byte) error { dec := json.NewDecoder(bytes.NewReader(data)) @@ -62,17 +62,17 @@ func (s *ModelConfigContainer) UnmarshalJSON(data []byte) error { switch t { - case "model_id": + case "inference_id": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "ModelId", err) + return fmt.Errorf("%s | %w", "InferenceId", err) } o := string(tmp[:]) o, err = strconv.Unquote(o) if err != nil { o = string(tmp[:]) } - s.ModelId = o + s.InferenceId = o case "service": var tmp json.RawMessage @@ -106,9 +106,9 @@ func (s *ModelConfigContainer) UnmarshalJSON(data []byte) error { return nil } -// NewModelConfigContainer returns a ModelConfigContainer. -func NewModelConfigContainer() *ModelConfigContainer { - r := &ModelConfigContainer{} +// NewInferenceEndpointInfo returns a InferenceEndpointInfo. +func NewInferenceEndpointInfo() *InferenceEndpointInfo { + r := &InferenceEndpointInfo{} return r } diff --git a/typedapi/types/inferencefeatureimportance.go b/typedapi/types/inferencefeatureimportance.go index 4a4aab3d26..c512cbb154 100644 --- a/typedapi/types/inferencefeatureimportance.go +++ b/typedapi/types/inferencefeatureimportance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InferenceFeatureImportance type. 
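The rename of ModelConfig and ModelConfigContainer to InferenceEndpoint and InferenceEndpointInfo, together with the model_id key becoming inference_id, reflects the move from per-model configuration to named inference endpoints. Below is a minimal decoding sketch against the renamed type; the JSON document is fabricated and only uses keys whose handling is visible in the hunks above.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{"inference_id": "my-endpoint", "service": "elser"}`)

	var info types.InferenceEndpointInfo
	if err := json.Unmarshal(payload, &info); err != nil {
		panic(err)
	}
	// Before this change the same value was read from the "model_id" field
	// of ModelConfigContainer.
	fmt.Println(info.InferenceId, info.Service)
}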
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L678-L682 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L685-L689 type InferenceFeatureImportance struct { Classes []InferenceClassImportance `json:"classes,omitempty"` FeatureName string `json:"feature_name"` @@ -71,7 +71,7 @@ func (s *InferenceFeatureImportance) UnmarshalJSON(data []byte) error { s.FeatureName = o case "importance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/inferenceprocessor.go b/typedapi/types/inferenceprocessor.go index fbb241f1c9..83820cb0fb 100644 --- a/typedapi/types/inferenceprocessor.go +++ b/typedapi/types/inferenceprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InferenceProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L725-L744 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L725-L744 type InferenceProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -105,7 +105,7 @@ func (s *InferenceProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/inferenceresponseresult.go b/typedapi/types/inferenceresponseresult.go index 7eb9ad4081..bc233cc427 100644 --- a/typedapi/types/inferenceresponseresult.go +++ b/typedapi/types/inferenceresponseresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InferenceResponseResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L459-L506 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L459-L506 type InferenceResponseResult struct { // Entities If the model is trained for named entity recognition (NER) tasks, the // response contains the recognized entities. 
@@ -99,7 +99,7 @@ func (s *InferenceResponseResult) UnmarshalJSON(data []byte) error { } case "is_truncated": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,8 +113,19 @@ func (s *InferenceResponseResult) UnmarshalJSON(data []byte) error { } case "predicted_value": - if err := dec.Decode(&s.PredictedValue); err != nil { - return fmt.Errorf("%s | %w", "PredictedValue", err) + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(PredictedValue) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "PredictedValue", err) + } + + s.PredictedValue = append(s.PredictedValue, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.PredictedValue); err != nil { + return fmt.Errorf("%s | %w", "PredictedValue", err) + } } case "predicted_value_sequence": @@ -130,7 +141,7 @@ func (s *InferenceResponseResult) UnmarshalJSON(data []byte) error { s.PredictedValueSequence = &o case "prediction_probability": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -146,7 +157,7 @@ func (s *InferenceResponseResult) UnmarshalJSON(data []byte) error { } case "prediction_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/inferenceresult.go b/typedapi/types/inferenceresult.go index f4d10cef73..a47783278e 100644 --- a/typedapi/types/inferenceresult.go +++ b/typedapi/types/inferenceresult.go @@ -16,14 +16,16 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // InferenceResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/_types/Results.ts#L59-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/_types/Results.ts#L79-L89 type InferenceResult struct { + Completion []CompletionResult `json:"completion,omitempty"` + Rerank []RankedDocument `json:"rerank,omitempty"` SparseEmbedding []SparseEmbeddingResult `json:"sparse_embedding,omitempty"` TextEmbedding []TextEmbeddingResult `json:"text_embedding,omitempty"` TextEmbeddingBytes []TextEmbeddingByteResult `json:"text_embedding_bytes,omitempty"` diff --git a/typedapi/types/inferencetopclassentry.go b/typedapi/types/inferencetopclassentry.go index a1509e902f..93cab3ba97 100644 --- a/typedapi/types/inferencetopclassentry.go +++ b/typedapi/types/inferencetopclassentry.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InferenceTopClassEntry type. 
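The new predicted_value branch above wraps a scalar JSON value in a one-element slice and decodes arrays directly, so both response shapes end up in the same []PredictedValue field. The sketch below exercises that code path under the assumption that predicted_value is the only field needed for decoding; both JSON documents are invented.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	for _, doc := range []string{
		`{"predicted_value": "cat"}`,          // scalar form
		`{"predicted_value": ["cat", "dog"]}`, // array form
	} {
		var res types.InferenceResponseResult
		if err := json.Unmarshal([]byte(doc), &res); err != nil {
			panic(err)
		}
		fmt.Println(len(res.PredictedValue)) // 1, then 2
	}
}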
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L672-L676 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L679-L683 type InferenceTopClassEntry struct { ClassName FieldValue `json:"class_name"` ClassProbability Float64 `json:"class_probability"` @@ -59,7 +59,7 @@ func (s *InferenceTopClassEntry) UnmarshalJSON(data []byte) error { } case "class_probability": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *InferenceTopClassEntry) UnmarshalJSON(data []byte) error { } case "class_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/influence.go b/typedapi/types/influence.go index a69d4e4b97..276aec5d5e 100644 --- a/typedapi/types/influence.go +++ b/typedapi/types/influence.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Influence type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Anomaly.ts#L140-L143 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Anomaly.ts#L140-L143 type Influence struct { InfluencerFieldName string `json:"influencer_field_name"` InfluencerFieldValues []string `json:"influencer_field_values"` diff --git a/typedapi/types/influencer.go b/typedapi/types/influencer.go index 56dffcc1fe..2d21b53955 100644 --- a/typedapi/types/influencer.go +++ b/typedapi/types/influencer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Influencer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Influencer.ts#L31-L83 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Influencer.ts#L31-L83 type Influencer struct { // BucketSpan The length of the bucket in seconds. This value matches the bucket span that // is specified in the job. 
@@ -124,7 +124,7 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { s.InfluencerFieldValue = o case "influencer_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -140,7 +140,7 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { } case "initial_influencer_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -156,7 +156,7 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { } case "is_interim": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -175,7 +175,7 @@ func (s *Influencer) UnmarshalJSON(data []byte) error { } case "probability": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/infofeaturestate.go b/typedapi/types/infofeaturestate.go index f3af0f6719..728429035b 100644 --- a/typedapi/types/infofeaturestate.go +++ b/typedapi/types/infofeaturestate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InfoFeatureState type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotInfoFeatureState.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotInfoFeatureState.ts#L22-L25 type InfoFeatureState struct { FeatureName string `json:"feature_name"` Indices []string `json:"indices"` diff --git a/typedapi/types/ingestpipeline.go b/typedapi/types/ingestpipeline.go index ae5535c844..ee5d4b21ef 100644 --- a/typedapi/types/ingestpipeline.go +++ b/typedapi/types/ingestpipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IngestPipeline type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Pipeline.ts#L23-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Pipeline.ts#L23-L45 type IngestPipeline struct { // Description Description of the ingest pipeline. Description *string `json:"description,omitempty"` diff --git a/typedapi/types/ingesttotal.go b/typedapi/types/ingesttotal.go index e8b1f7d7ea..01f3f81f87 100644 --- a/typedapi/types/ingesttotal.go +++ b/typedapi/types/ingesttotal.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IngestTotal type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L356-L377 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L356-L377 type IngestTotal struct { // Count Total number of documents ingested during the lifetime of this node. Count *int64 `json:"count,omitempty"` @@ -62,7 +62,7 @@ func (s *IngestTotal) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -77,7 +77,7 @@ func (s *IngestTotal) UnmarshalJSON(data []byte) error { } case "current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *IngestTotal) UnmarshalJSON(data []byte) error { } case "failed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/inlineget.go b/typedapi/types/inlineget.go index e5e5aba73d..4f6c0eb427 100644 --- a/typedapi/types/inlineget.go +++ b/typedapi/types/inlineget.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InlineGet type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L321-L330 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L320-L333 type InlineGet struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` @@ -66,7 +66,7 @@ func (s *InlineGet) UnmarshalJSON(data []byte) error { } case "found": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *InlineGet) UnmarshalJSON(data []byte) error { } case "_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -131,7 +131,7 @@ func (s *InlineGet) UnmarshalJSON(data []byte) error { func (s InlineGet) MarshalJSON() ([]byte, error) { type opt InlineGet // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/inlinegetdictuserdefined.go b/typedapi/types/inlinegetdictuserdefined.go index 2b19b3694a..2bef08cba8 100644 --- a/typedapi/types/inlinegetdictuserdefined.go +++ b/typedapi/types/inlinegetdictuserdefined.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InlineGetDictUserDefined type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L321-L330 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L320-L333 type InlineGetDictUserDefined struct { Fields map[string]json.RawMessage `json:"fields,omitempty"` Found bool `json:"found"` @@ -66,7 +66,7 @@ func (s *InlineGetDictUserDefined) UnmarshalJSON(data []byte) error { } case "found": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *InlineGetDictUserDefined) UnmarshalJSON(data []byte) error { } case "_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -134,7 +134,7 @@ func (s *InlineGetDictUserDefined) UnmarshalJSON(data []byte) error { func (s InlineGetDictUserDefined) MarshalJSON() ([]byte, error) { type opt InlineGetDictUserDefined // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/inlinescript.go b/typedapi/types/inlinescript.go index 90fbc63bd8..be5109519e 100644 --- a/typedapi/types/inlinescript.go +++ b/typedapi/types/inlinescript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // InlineScript type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Scripting.ts#L67-L79 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Scripting.ts#L67-L79 type InlineScript struct { // Lang Specifies the language the script is written in. Lang *scriptlanguage.ScriptLanguage `json:"lang,omitempty"` diff --git a/typedapi/types/innerhits.go b/typedapi/types/innerhits.go index a4c28c6f20..9c42ae16ce 100644 --- a/typedapi/types/innerhits.go +++ b/typedapi/types/innerhits.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InnerHits type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/hits.ts#L106-L140 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/hits.ts#L106-L140 type InnerHits struct { Collapse *FieldCollapse `json:"collapse,omitempty"` DocvalueFields []FieldAndFormat `json:"docvalue_fields,omitempty"` @@ -83,7 +83,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { } case "explain": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { case "from": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -134,7 +134,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { } case "ignore_unmapped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -161,7 +161,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { } case "seq_no_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -176,7 +176,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -228,7 +228,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { } case "track_scores": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -242,7 +242,7 @@ func (s *InnerHits) UnmarshalJSON(data []byte) error { } case "version": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/innerhitsresult.go b/typedapi/types/innerhitsresult.go index 94f389b014..74e0f6aba6 100644 --- a/typedapi/types/innerhitsresult.go +++ b/typedapi/types/innerhitsresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // InnerHitsResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/hits.ts#L84-L86 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/hits.ts#L84-L86 type InnerHitsResult struct { Hits *HitsMetadata `json:"hits,omitempty"` } diff --git a/typedapi/types/inprogress.go b/typedapi/types/inprogress.go index 05e3558c16..c603335a65 100644 --- a/typedapi/types/inprogress.go +++ b/typedapi/types/inprogress.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // InProgress type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/_types/SnapshotLifecycle.ts#L131-L136 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/_types/SnapshotLifecycle.ts#L131-L136 type InProgress struct { Name string `json:"name"` StartTimeMillis int64 `json:"start_time_millis"` diff --git a/typedapi/types/input.go b/typedapi/types/input.go index 9827e94e74..d4bf4b7542 100644 --- a/typedapi/types/input.go +++ b/typedapi/types/input.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Input type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/types.ts#L56-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/types.ts#L56-L58 type Input struct { FieldNames []string `json:"field_names"` } diff --git a/typedapi/types/integernumberproperty.go b/typedapi/types/integernumberproperty.go index 90b0f5ab6b..0cfbdf5f85 100644 --- a/typedapi/types/integernumberproperty.go +++ b/typedapi/types/integernumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // IntegerNumberProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L149-L152 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L157-L160 type IntegerNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -79,7 +79,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -171,7 +171,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -261,12 +261,6 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -285,6 +279,18 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -441,6 +447,12 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -452,7 +464,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -467,7 +479,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -481,7 +493,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -504,7 +516,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { case "null_value": - var tmp interface{} + var tmp any 
dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -530,7 +542,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -551,7 +563,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -641,12 +653,6 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -665,6 +671,18 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -821,6 +839,12 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -879,7 +903,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -893,7 +917,7 @@ func (s *IntegerNumberProperty) UnmarshalJSON(data []byte) error { } case "time_series_dimension": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/integerrangeproperty.go b/typedapi/types/integerrangeproperty.go index 4e4a4d6e58..091842fd5e 100644 --- a/typedapi/types/integerrangeproperty.go +++ b/typedapi/types/integerrangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IntegerRangeProperty type. 
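With the added "semantic_text" and "icu_collation_keyword" cases in the property switches (and the "{dynamic_property}" to "{dynamic_type}" key rename), multi-fields of those types now decode into their concrete property structs instead of the generic fallback. The sketch below is a hedged illustration; the mapping fragment is invented and assumes the usual kind-dispatch behaviour of the generated decoders.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Illustrative multi-field mapping fragment with a semantic_text sub-field.
	mapping := []byte(`{
		"fields": {
			"inference": { "type": "semantic_text", "inference_id": "my-elser" }
		}
	}`)

	var prop types.IntegerNumberProperty
	if err := json.Unmarshal(mapping, &prop); err != nil {
		panic(err)
	}

	_, isSemantic := prop.Fields["inference"].(*types.SemanticTextProperty)
	fmt.Println("decoded as *SemanticTextProperty:", isSemantic) // expected: true
}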
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/range.ts#L42-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/range.ts#L42-L44 type IntegerRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -67,7 +67,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -159,7 +159,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -249,12 +249,6 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -273,6 +267,18 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -429,6 +435,12 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -440,7 +452,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -455,7 +467,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -483,7 +495,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -504,7 +516,7 @@ func 
(s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -594,12 +606,6 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -618,6 +624,18 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -774,6 +792,12 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -796,7 +820,7 @@ func (s *IntegerRangeProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/intervals.go b/typedapi/types/intervals.go index 422d10ec2a..89b06077b6 100644 --- a/typedapi/types/intervals.go +++ b/typedapi/types/intervals.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Intervals type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L83-L110 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L83-L110 type Intervals struct { // AllOf Returns matches that span a combination of other rules. AllOf *IntervalsAllOf `json:"all_of,omitempty"` diff --git a/typedapi/types/intervalsallof.go b/typedapi/types/intervalsallof.go index 34fab10339..e8c301a7c6 100644 --- a/typedapi/types/intervalsallof.go +++ b/typedapi/types/intervalsallof.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IntervalsAllOf type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L50-L70 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L50-L70 type IntervalsAllOf struct { // Filter Rule used to filter returned intervals. Filter *IntervalsFilter `json:"filter,omitempty"` @@ -74,7 +74,7 @@ func (s *IntervalsAllOf) UnmarshalJSON(data []byte) error { case "max_gaps": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *IntervalsAllOf) UnmarshalJSON(data []byte) error { } case "ordered": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/intervalsanyof.go b/typedapi/types/intervalsanyof.go index 971a8464a4..bd46fc96a5 100644 --- a/typedapi/types/intervalsanyof.go +++ b/typedapi/types/intervalsanyof.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IntervalsAnyOf type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L72-L81 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L72-L81 type IntervalsAnyOf struct { // Filter Rule used to filter returned intervals. Filter *IntervalsFilter `json:"filter,omitempty"` diff --git a/typedapi/types/intervalsfilter.go b/typedapi/types/intervalsfilter.go index 2ec5f6cc29..0a86330f75 100644 --- a/typedapi/types/intervalsfilter.go +++ b/typedapi/types/intervalsfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // IntervalsFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L112-L152 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L112-L152 type IntervalsFilter struct { // After Query used to return intervals that follow an interval from the `filter` // rule. diff --git a/typedapi/types/intervalsfuzzy.go b/typedapi/types/intervalsfuzzy.go index cf89bd8d49..6db2af5833 100644 --- a/typedapi/types/intervalsfuzzy.go +++ b/typedapi/types/intervalsfuzzy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IntervalsFuzzy type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L154-L184 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L154-L184 type IntervalsFuzzy struct { // Analyzer Analyzer used to normalize the term. Analyzer *string `json:"analyzer,omitempty"` @@ -85,7 +85,7 @@ func (s *IntervalsFuzzy) UnmarshalJSON(data []byte) error { case "prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -112,7 +112,7 @@ func (s *IntervalsFuzzy) UnmarshalJSON(data []byte) error { s.Term = o case "transpositions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/intervalsmatch.go b/typedapi/types/intervalsmatch.go index f6a55c73a8..4ba8d4ae71 100644 --- a/typedapi/types/intervalsmatch.go +++ b/typedapi/types/intervalsmatch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IntervalsMatch type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L186-L216 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L186-L216 type IntervalsMatch struct { // Analyzer Analyzer used to analyze terms in the query. Analyzer *string `json:"analyzer,omitempty"` @@ -85,7 +85,7 @@ func (s *IntervalsMatch) UnmarshalJSON(data []byte) error { case "max_gaps": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *IntervalsMatch) UnmarshalJSON(data []byte) error { } case "ordered": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/intervalsprefix.go b/typedapi/types/intervalsprefix.go index 731c784f6f..eee6fe1dfd 100644 --- a/typedapi/types/intervalsprefix.go +++ b/typedapi/types/intervalsprefix.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IntervalsPrefix type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L218-L233 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L218-L233 type IntervalsPrefix struct { // Analyzer Analyzer used to analyze the `prefix`. Analyzer *string `json:"analyzer,omitempty"` diff --git a/typedapi/types/intervalsquery.go b/typedapi/types/intervalsquery.go index ac67863209..1655a92482 100644 --- a/typedapi/types/intervalsquery.go +++ b/typedapi/types/intervalsquery.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IntervalsQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L235-L263 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L235-L263 type IntervalsQuery struct { // AllOf Returns matches that span a combination of other rules. AllOf *IntervalsAllOf `json:"all_of,omitempty"` @@ -81,7 +81,7 @@ func (s *IntervalsQuery) UnmarshalJSON(data []byte) error { } case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/intervalswildcard.go b/typedapi/types/intervalswildcard.go index 17b3831985..7dd3bb1649 100644 --- a/typedapi/types/intervalswildcard.go +++ b/typedapi/types/intervalswildcard.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IntervalsWildcard type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L265-L280 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L265-L280 type IntervalsWildcard struct { // Analyzer Analyzer used to analyze the `pattern`. // Defaults to the top-level field's analyzer. diff --git a/typedapi/types/invertedindex.go b/typedapi/types/invertedindex.go index cd5eaf4975..ace83b5ceb 100644 --- a/typedapi/types/invertedindex.go +++ b/typedapi/types/invertedindex.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // InvertedIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L65-L73 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L68-L76 type InvertedIndex struct { Offsets uint `json:"offsets"` Payloads uint `json:"payloads"` diff --git a/typedapi/types/invocation.go b/typedapi/types/invocation.go index c3c45a7d6e..5b16fe6fa2 100644 --- a/typedapi/types/invocation.go +++ b/typedapi/types/invocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Invocation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/_types/SnapshotLifecycle.ts#L138-L141 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/_types/SnapshotLifecycle.ts#L138-L141 type Invocation struct { SnapshotName string `json:"snapshot_name"` Time DateTime `json:"time"` diff --git a/typedapi/types/invocations.go b/typedapi/types/invocations.go index 489f34b2ac..7f5ba69ffd 100644 --- a/typedapi/types/invocations.go +++ b/typedapi/types/invocations.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Invocations type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L44-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L44-L46 type Invocations struct { Total int64 `json:"total"` } @@ -52,7 +52,7 @@ func (s *Invocations) UnmarshalJSON(data []byte) error { switch t { case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/iostatdevice.go b/typedapi/types/iostatdevice.go index 86f9f48743..0324b1c6c6 100644 --- a/typedapi/types/iostatdevice.go +++ b/typedapi/types/iostatdevice.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IoStatDevice type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L730-L755 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L730-L755 type IoStatDevice struct { // DeviceName The Linux device name. 
DeviceName *string `json:"device_name,omitempty"` @@ -80,7 +80,7 @@ func (s *IoStatDevice) UnmarshalJSON(data []byte) error { s.DeviceName = &o case "operations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *IoStatDevice) UnmarshalJSON(data []byte) error { } case "read_kilobytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +110,7 @@ func (s *IoStatDevice) UnmarshalJSON(data []byte) error { } case "read_operations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *IoStatDevice) UnmarshalJSON(data []byte) error { } case "write_kilobytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -140,7 +140,7 @@ func (s *IoStatDevice) UnmarshalJSON(data []byte) error { } case "write_operations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/iostats.go b/typedapi/types/iostats.go index 65470cdb54..12c60c0c9a 100644 --- a/typedapi/types/iostats.go +++ b/typedapi/types/iostats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // IoStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L718-L728 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L718-L728 type IoStats struct { // Devices Array of disk metrics for each device that is backing an Elasticsearch data // path. diff --git a/typedapi/types/ipfilter.go b/typedapi/types/ipfilter.go index 0e74430549..b83673716a 100644 --- a/typedapi/types/ipfilter.go +++ b/typedapi/types/ipfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IpFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L167-L170 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L167-L170 type IpFilter struct { Http bool `json:"http"` Transport bool `json:"transport"` @@ -53,7 +53,7 @@ func (s *IpFilter) UnmarshalJSON(data []byte) error { switch t { case "http": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -67,7 +67,7 @@ func (s *IpFilter) UnmarshalJSON(data []byte) error { } case "transport": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/ipprefixaggregate.go b/typedapi/types/ipprefixaggregate.go index 7b4329e879..d67bc5133f 100644 --- a/typedapi/types/ipprefixaggregate.go +++ b/typedapi/types/ipprefixaggregate.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // IpPrefixAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L629-L630 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L633-L634 type IpPrefixAggregate struct { Buckets BucketsIpPrefixBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/ipprefixaggregation.go b/typedapi/types/ipprefixaggregation.go index 56f86fe57c..4bfd0d9339 100644 --- a/typedapi/types/ipprefixaggregation.go +++ b/typedapi/types/ipprefixaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // IpPrefixAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L1114-L1143 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L1122-L1151 type IpPrefixAggregation struct { // AppendPrefixLength Defines whether the prefix length is appended to IP address keys in the // response. @@ -42,12 +42,10 @@ type IpPrefixAggregation struct { IsIpv6 *bool `json:"is_ipv6,omitempty"` // Keyed Defines whether buckets are returned as a hash rather than an array in the // response. - Keyed *bool `json:"keyed,omitempty"` - Meta Metadata `json:"meta,omitempty"` + Keyed *bool `json:"keyed,omitempty"` // MinDocCount Minimum number of documents in a bucket for it to be included in the // response. - MinDocCount *int64 `json:"min_doc_count,omitempty"` - Name *string `json:"name,omitempty"` + MinDocCount *int64 `json:"min_doc_count,omitempty"` // PrefixLength Length of the network prefix. For IPv4 addresses the accepted range is [0, // 32]. // For IPv6 addresses the accepted range is [0, 128]. 
@@ -70,7 +68,7 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { switch t { case "append_prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +87,7 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { } case "is_ipv6": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,7 +101,7 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { } case "keyed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -116,13 +114,8 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { s.Keyed = &v } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -136,21 +129,9 @@ func (s *IpPrefixAggregation) UnmarshalJSON(data []byte) error { s.MinDocCount = &f } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/ipprefixbucket.go b/typedapi/types/ipprefixbucket.go index 083383291d..0e574b5b82 100644 --- a/typedapi/types/ipprefixbucket.go +++ b/typedapi/types/ipprefixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // IpPrefixBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L632-L637 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L636-L641 type IpPrefixBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -58,7 +58,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -73,7 +73,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { } case "is_ipv6": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -112,7 +112,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { case "prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -571,7 +571,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -621,7 +621,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -631,7 +631,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -648,7 +648,7 @@ func (s *IpPrefixBucket) UnmarshalJSON(data []byte) error { func (s IpPrefixBucket) MarshalJSON() ([]byte, error) { type opt IpPrefixBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/ipproperty.go b/typedapi/types/ipproperty.go index cf1a01397d..67dcc75b86 100644 --- a/typedapi/types/ipproperty.go +++ b/typedapi/types/ipproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // IpProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/specialized.ts#L59-L73 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/specialized.ts#L65-L79 type IpProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -74,7 +74,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,7 +106,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -131,7 +131,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -152,7 +152,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -242,12 +242,6 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -266,6 +260,18 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -422,6 +428,12 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -433,7 +445,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -448,7 +460,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -462,7 +474,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -507,7 +519,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -528,7 +540,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - 
case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -618,12 +630,6 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -642,6 +648,18 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -798,6 +816,12 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -856,7 +880,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -870,7 +894,7 @@ func (s *IpProperty) UnmarshalJSON(data []byte) error { } case "time_series_dimension": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/iprangeaggregate.go b/typedapi/types/iprangeaggregate.go index b75bbc36d1..3d9341f4ea 100644 --- a/typedapi/types/iprangeaggregate.go +++ b/typedapi/types/iprangeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // IpRangeAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L556-L558 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L560-L562 type IpRangeAggregate struct { Buckets BucketsIpRangeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/iprangeaggregation.go b/typedapi/types/iprangeaggregation.go index b25ee71141..94ad5cca18 100644 --- a/typedapi/types/iprangeaggregation.go +++ b/typedapi/types/iprangeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,17 +26,14 @@ import ( "errors" "fmt" "io" - "strconv" ) // IpRangeAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L548-L557 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L550-L559 type IpRangeAggregation struct { // Field The date field whose values are used to build ranges. - Field *string `json:"field,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Field *string `json:"field,omitempty"` // Ranges Array of IP ranges. Ranges []IpRangeAggregationRange `json:"ranges,omitempty"` } @@ -61,23 +58,6 @@ func (s *IpRangeAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Field", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "ranges": if err := dec.Decode(&s.Ranges); err != nil { return fmt.Errorf("%s | %w", "Ranges", err) diff --git a/typedapi/types/iprangeaggregationrange.go b/typedapi/types/iprangeaggregationrange.go index 73bb88db61..aee18e37de 100644 --- a/typedapi/types/iprangeaggregationrange.go +++ b/typedapi/types/iprangeaggregationrange.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // IpRangeAggregationRange type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L559-L572 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L561-L574 type IpRangeAggregationRange struct { // From Start of the range. - From string `json:"from,omitempty"` + From *string `json:"from,omitempty"` // Mask IP range defined as a CIDR mask. Mask *string `json:"mask,omitempty"` // To End of the range. - To string `json:"to,omitempty"` + To *string `json:"to,omitempty"` } func (s *IpRangeAggregationRange) UnmarshalJSON(data []byte) error { @@ -66,7 +66,7 @@ func (s *IpRangeAggregationRange) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.From = o + s.From = &o case "mask": var tmp json.RawMessage @@ -90,7 +90,7 @@ func (s *IpRangeAggregationRange) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.To = o + s.To = &o } } diff --git a/typedapi/types/iprangebucket.go b/typedapi/types/iprangebucket.go index 872569fc24..79f83718f7 100644 --- a/typedapi/types/iprangebucket.go +++ b/typedapi/types/iprangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // IpRangeBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L560-L564 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L564-L568 type IpRangeBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -57,7 +57,7 @@ func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -552,7 +552,7 @@ func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -602,7 +602,7 @@ func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -612,7 +612,7 @@ func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -629,7 +629,7 @@ func (s *IpRangeBucket) UnmarshalJSON(data []byte) error { func (s IpRangeBucket) MarshalJSON() ([]byte, error) { type opt IpRangeBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/iprangeproperty.go b/typedapi/types/iprangeproperty.go index 06aebd20de..d7082fe6d4 100644 --- a/typedapi/types/iprangeproperty.go +++ b/typedapi/types/iprangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // IpRangeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/range.ts#L46-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/range.ts#L46-L48 type IpRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -67,7 +67,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -159,7 +159,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -249,12 +249,6 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -273,6 +267,18 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -429,6 +435,12 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -440,7 +452,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -455,7 +467,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -483,7 +495,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -504,7 +516,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { 
return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -594,12 +606,6 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -618,6 +624,18 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -774,6 +792,12 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -796,7 +820,7 @@ func (s *IpRangeProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/job.go b/typedapi/types/job.go index 1e19d586de..2f6fe7dca8 100644 --- a/typedapi/types/job.go +++ b/typedapi/types/job.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Job type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Job.ts#L61-L180 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Job.ts#L61-L180 type Job struct { // AllowLazyOpen Advanced configuration option. 
// Specifies whether this job can open when there is insufficient machine @@ -154,7 +154,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { switch t { case "allow_lazy_open": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -198,7 +198,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { } case "daily_model_snapshot_retention_after_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -223,7 +223,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { } case "deleting": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -291,7 +291,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { } case "model_snapshot_retention_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -306,7 +306,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { } case "renormalization_window_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -326,7 +326,7 @@ func (s *Job) UnmarshalJSON(data []byte) error { } case "results_retention_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/jobblocked.go b/typedapi/types/jobblocked.go index dde8416b4c..44b4baafa0 100644 --- a/typedapi/types/jobblocked.go +++ b/typedapi/types/jobblocked.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // JobBlocked type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Job.ts#L392-L395 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Job.ts#L392-L395 type JobBlocked struct { Reason jobblockedreason.JobBlockedReason `json:"reason"` TaskId TaskId `json:"task_id,omitempty"` diff --git a/typedapi/types/jobconfig.go b/typedapi/types/jobconfig.go index 7074e2a2da..778c12498f 100644 --- a/typedapi/types/jobconfig.go +++ b/typedapi/types/jobconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // JobConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Job.ts#L182-L283 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Job.ts#L182-L283 type JobConfig struct { // AllowLazyOpen Advanced configuration option. 
Specifies whether this job can open when there // is insufficient machine learning node capacity for it to be immediately @@ -135,7 +135,7 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { switch t { case "allow_lazy_open": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -169,7 +169,7 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { } case "daily_model_snapshot_retention_after_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -233,7 +233,7 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { } case "model_snapshot_retention_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -248,7 +248,7 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { } case "renormalization_window_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -268,7 +268,7 @@ func (s *JobConfig) UnmarshalJSON(data []byte) error { } case "results_retention_days": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/jobforecaststatistics.go b/typedapi/types/jobforecaststatistics.go index 9488579e9b..e77057959d 100644 --- a/typedapi/types/jobforecaststatistics.go +++ b/typedapi/types/jobforecaststatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // JobForecastStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Job.ts#L343-L350 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Job.ts#L343-L350 type JobForecastStatistics struct { ForecastedJobs int `json:"forecasted_jobs"` MemoryBytes *JobStatistics `json:"memory_bytes,omitempty"` @@ -58,7 +58,7 @@ func (s *JobForecastStatistics) UnmarshalJSON(data []byte) error { case "forecasted_jobs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *JobForecastStatistics) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/jobsrecord.go b/typedapi/types/jobsrecord.go index 6c64d35236..3189667acb 100644 --- a/typedapi/types/jobsrecord.go +++ b/typedapi/types/jobsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // JobsRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/ml_jobs/types.ts#L24-L347 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/ml_jobs/types.ts#L24-L347 type JobsRecord struct { // AssignmentExplanation For open anomaly detection jobs only, contains messages relating to the // selection of a node to run the job. diff --git a/typedapi/types/jobstatistics.go b/typedapi/types/jobstatistics.go index 5b5cbcf456..359ae27ae1 100644 --- a/typedapi/types/jobstatistics.go +++ b/typedapi/types/jobstatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // JobStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Job.ts#L54-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Job.ts#L54-L59 type JobStatistics struct { Avg Float64 `json:"avg"` Max Float64 `json:"max"` @@ -55,7 +55,7 @@ func (s *JobStatistics) UnmarshalJSON(data []byte) error { switch t { case "avg": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *JobStatistics) UnmarshalJSON(data []byte) error { } case "max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *JobStatistics) UnmarshalJSON(data []byte) error { } case "min": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,7 +103,7 @@ func (s *JobStatistics) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/jobstats.go b/typedapi/types/jobstats.go index 07acbe3837..28ec6dcb24 100644 --- a/typedapi/types/jobstats.go +++ b/typedapi/types/jobstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // JobStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Job.ts#L284-L330 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Job.ts#L284-L330 type JobStats struct { // AssignmentExplanation For open anomaly detection jobs only, contains messages relating to the // selection of a node to run the job. 
@@ -101,7 +101,7 @@ func (s *JobStats) UnmarshalJSON(data []byte) error { } case "deleting": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/jobtimingstats.go b/typedapi/types/jobtimingstats.go index 9096e6269f..e64fb0a42f 100644 --- a/typedapi/types/jobtimingstats.go +++ b/typedapi/types/jobtimingstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // JobTimingStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Job.ts#L332-L341 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Job.ts#L332-L341 type JobTimingStats struct { AverageBucketProcessingTimeMs Float64 `json:"average_bucket_processing_time_ms,omitempty"` BucketCount int64 `json:"bucket_count"` @@ -64,7 +64,7 @@ func (s *JobTimingStats) UnmarshalJSON(data []byte) error { } case "bucket_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/jobusage.go b/typedapi/types/jobusage.go index e414aedfe5..244ac4fed0 100644 --- a/typedapi/types/jobusage.go +++ b/typedapi/types/jobusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // JobUsage type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L364-L370 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L364-L370 type JobUsage struct { Count int `json:"count"` CreatedBy map[string]int64 `json:"created_by"` @@ -57,7 +57,7 @@ func (s *JobUsage) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/joinprocessor.go b/typedapi/types/joinprocessor.go index 1c23e2e0ac..21b29ee915 100644 --- a/typedapi/types/joinprocessor.go +++ b/typedapi/types/joinprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // JoinProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L801-L816 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L801-L816 type JoinProcessor struct { // Description Description of the processor. 
// Useful for describing the purpose of the processor or its configuration. @@ -99,7 +99,7 @@ func (s *JoinProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/joinproperty.go b/typedapi/types/joinproperty.go index 02d1655d9b..3baab7bad0 100644 --- a/typedapi/types/joinproperty.go +++ b/typedapi/types/joinproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // JoinProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L83-L87 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L89-L93 type JoinProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` EagerGlobalOrdinals *bool `json:"eager_global_ordinals,omitempty"` @@ -67,7 +67,7 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { } case "eager_global_ordinals": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -108,7 +108,7 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -198,12 +198,6 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -222,6 +216,18 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -378,6 +384,12 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -389,7 +401,7 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -418,7 +430,7 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { refs := 
make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -439,7 +451,7 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -529,12 +541,6 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -553,6 +559,18 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -709,6 +727,12 @@ func (s *JoinProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { diff --git a/typedapi/types/jsonprocessor.go b/typedapi/types/jsonprocessor.go index e5aa597491..cbe3ed2f65 100644 --- a/typedapi/types/jsonprocessor.go +++ b/typedapi/types/jsonprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // JsonProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L818-L847 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L818-L847 type JsonProcessor struct { // AddToRoot Flag that forces the parsed JSON to be added at the top level of the // document. @@ -83,7 +83,7 @@ func (s *JsonProcessor) UnmarshalJSON(data []byte) error { switch t { case "add_to_root": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -102,7 +102,7 @@ func (s *JsonProcessor) UnmarshalJSON(data []byte) error { } case "allow_duplicate_keys": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -145,7 +145,7 @@ func (s *JsonProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/jvm.go b/typedapi/types/jvm.go index 0c25c40da1..92ae0a96f7 100644 --- a/typedapi/types/jvm.go +++ b/typedapi/types/jvm.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Jvm type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L811-L845 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L811-L845 type Jvm struct { // BufferPools Contains statistics about JVM buffer pools for the node. BufferPools map[string]NodeBufferPool `json:"buffer_pools,omitempty"` @@ -96,7 +96,7 @@ func (s *Jvm) UnmarshalJSON(data []byte) error { } case "timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -123,7 +123,7 @@ func (s *Jvm) UnmarshalJSON(data []byte) error { s.Uptime = &o case "uptime_in_millis": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/jvmclasses.go b/typedapi/types/jvmclasses.go index 7f8004ef06..37085e0c82 100644 --- a/typedapi/types/jvmclasses.go +++ b/typedapi/types/jvmclasses.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // JvmClasses type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L908-L921 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L908-L921 type JvmClasses struct { // CurrentLoadedCount Number of classes currently loaded by JVM. CurrentLoadedCount *int64 `json:"current_loaded_count,omitempty"` @@ -57,7 +57,7 @@ func (s *JvmClasses) UnmarshalJSON(data []byte) error { switch t { case "current_loaded_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *JvmClasses) UnmarshalJSON(data []byte) error { } case "total_loaded_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *JvmClasses) UnmarshalJSON(data []byte) error { } case "total_unloaded_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/jvmmemorystats.go b/typedapi/types/jvmmemorystats.go index 705ccaae94..71a665ef51 100644 --- a/typedapi/types/jvmmemorystats.go +++ b/typedapi/types/jvmmemorystats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // JvmMemoryStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L847-L876 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L847-L876 type JvmMemoryStats struct { // HeapCommittedInBytes Amount of memory, in bytes, available for use by the heap. HeapCommittedInBytes *int64 `json:"heap_committed_in_bytes,omitempty"` @@ -65,7 +65,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { switch t { case "heap_committed_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { } case "heap_max_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { } case "heap_used_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +110,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { } case "heap_used_percent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { } case "non_heap_committed_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -140,7 +140,7 @@ func (s *JvmMemoryStats) UnmarshalJSON(data []byte) error { } case "non_heap_used_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/jvmstats.go b/typedapi/types/jvmstats.go index 1ae3a3d91a..0a65d2c6cf 100644 --- a/typedapi/types/jvmstats.go +++ b/typedapi/types/jvmstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // JvmStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_memory_stats/types.ts#L50-L63 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_memory_stats/types.ts#L50-L63 type JvmStats struct { // HeapMax Maximum amount of memory available for use by the heap. HeapMax ByteSize `json:"heap_max,omitempty"` @@ -71,7 +71,7 @@ func (s *JvmStats) UnmarshalJSON(data []byte) error { case "heap_max_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *JvmStats) UnmarshalJSON(data []byte) error { case "java_inference_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *JvmStats) UnmarshalJSON(data []byte) error { case "java_inference_max_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/jvmthreads.go b/typedapi/types/jvmthreads.go index 980f9df136..5f42c7656c 100644 --- a/typedapi/types/jvmthreads.go +++ b/typedapi/types/jvmthreads.go @@ -16,7 +16,7 @@ // under the License. 
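A side note on the `interface{}` to `any` churn that dominates these hunks: it is a pure spelling change, and the surrounding code is the client's tolerant scalar decoding, where a numeric or boolean field accepts either a JSON number/bool or its quoted-string form. A standalone sketch of that pattern under an assumed field name (`heap_used_in_bytes`, mirroring JvmMemoryStats above; the helper itself is not part of the generated code):

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeFlexibleInt64 mirrors the generated "var tmp any" pattern:
// accept 123 or "123" for the same field.
func decodeFlexibleInt64(raw json.RawMessage) (int64, error) {
	var tmp any
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return 0, err
	}
	switch v := tmp.(type) {
	case string:
		return strconv.ParseInt(v, 10, 64)
	case float64: // encoding/json decodes JSON numbers into float64
		return int64(v), nil
	default:
		return 0, fmt.Errorf("unexpected JSON type %T for heap_used_in_bytes", tmp)
	}
}

func main() {
	asNumber, _ := decodeFlexibleInt64(json.RawMessage(`1073741824`))
	asString, _ := decodeFlexibleInt64(json.RawMessage(`"1073741824"`))
	fmt.Println(asNumber, asString) // 1073741824 1073741824
}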
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // JvmThreads type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L897-L906 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L897-L906 type JvmThreads struct { // Count Number of active threads in use by JVM. Count *int64 `json:"count,omitempty"` @@ -55,7 +55,7 @@ func (s *JvmThreads) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *JvmThreads) UnmarshalJSON(data []byte) error { } case "peak_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/keeptypestokenfilter.go b/typedapi/types/keeptypestokenfilter.go index 1140e87c11..784a35e976 100644 --- a/typedapi/types/keeptypestokenfilter.go +++ b/typedapi/types/keeptypestokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // KeepTypesTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L218-L222 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L220-L224 type KeepTypesTokenFilter struct { Mode *keeptypesmode.KeepTypesMode `json:"mode,omitempty"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/keepwordstokenfilter.go b/typedapi/types/keepwordstokenfilter.go index 79d3872141..a90815abd7 100644 --- a/typedapi/types/keepwordstokenfilter.go +++ b/typedapi/types/keepwordstokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // KeepWordsTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L224-L229 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L226-L231 type KeepWordsTokenFilter struct { KeepWords []string `json:"keep_words,omitempty"` KeepWordsCase *bool `json:"keep_words_case,omitempty"` @@ -61,7 +61,7 @@ func (s *KeepWordsTokenFilter) UnmarshalJSON(data []byte) error { } case "keep_words_case": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/keyedpercentiles.go b/typedapi/types/keyedpercentiles.go index 9b35ded88f..280a856dc0 100644 --- a/typedapi/types/keyedpercentiles.go +++ b/typedapi/types/keyedpercentiles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // KeyedPercentiles type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L158-L158 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L158-L158 type KeyedPercentiles map[string]string func (s KeyedPercentiles) UnmarshalJSON(data []byte) error { @@ -51,7 +51,7 @@ func (s KeyedPercentiles) UnmarshalJSON(data []byte) error { if key, ok := t.(string); ok { - var tmp interface{} + var tmp any if err := dec.Decode(&tmp); err != nil { return err } diff --git a/typedapi/types/keyedprocessor.go b/typedapi/types/keyedprocessor.go index 2098116976..1237b669ab 100644 --- a/typedapi/types/keyedprocessor.go +++ b/typedapi/types/keyedprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // KeyedProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L379-L382 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L379-L382 type KeyedProcessor struct { Stats *Processor `json:"stats,omitempty"` Type *string `json:"type,omitempty"` diff --git a/typedapi/types/keyvalueprocessor.go b/typedapi/types/keyvalueprocessor.go index 27c8819202..91a0c84c37 100644 --- a/typedapi/types/keyvalueprocessor.go +++ b/typedapi/types/keyvalueprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // KeyValueProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L856-L908 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L856-L908 type KeyValueProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -138,7 +138,7 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -152,7 +152,7 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -188,7 +188,7 @@ func (s *KeyValueProcessor) UnmarshalJSON(data []byte) error { s.Prefix = &o case "strip_brackets": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/keywordanalyzer.go b/typedapi/types/keywordanalyzer.go index 9fb5a3d05c..bcf0920168 100644 --- a/typedapi/types/keywordanalyzer.go +++ b/typedapi/types/keywordanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // KeywordAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L47-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L47-L50 type KeywordAnalyzer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` diff --git a/typedapi/types/keywordmarkertokenfilter.go b/typedapi/types/keywordmarkertokenfilter.go index 343fb53bd5..75d19c8247 100644 --- a/typedapi/types/keywordmarkertokenfilter.go +++ b/typedapi/types/keywordmarkertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // KeywordMarkerTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L231-L237 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L233-L239 type KeywordMarkerTokenFilter struct { IgnoreCase *bool `json:"ignore_case,omitempty"` Keywords []string `json:"keywords,omitempty"` @@ -57,7 +57,7 @@ func (s *KeywordMarkerTokenFilter) UnmarshalJSON(data []byte) error { switch t { case "ignore_case": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/keywordproperty.go b/typedapi/types/keywordproperty.go index 3d35ef27b1..e52099c285 100644 --- a/typedapi/types/keywordproperty.go +++ b/typedapi/types/keywordproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,11 +30,12 @@ import ( "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/dynamicmapping" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/indexoptions" + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/onscripterror" ) // KeywordProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L89-L105 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L95-L113 type KeywordProperty struct { Boost *Float64 `json:"boost,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -46,14 +47,16 @@ type KeywordProperty struct { Index *bool `json:"index,omitempty"` IndexOptions *indexoptions.IndexOptions `json:"index_options,omitempty"` // Meta Metadata about the field. - Meta map[string]string `json:"meta,omitempty"` - Normalizer *string `json:"normalizer,omitempty"` - Norms *bool `json:"norms,omitempty"` - NullValue *string `json:"null_value,omitempty"` - Properties map[string]Property `json:"properties,omitempty"` - Similarity *string `json:"similarity,omitempty"` - SplitQueriesOnWhitespace *bool `json:"split_queries_on_whitespace,omitempty"` - Store *bool `json:"store,omitempty"` + Meta map[string]string `json:"meta,omitempty"` + Normalizer *string `json:"normalizer,omitempty"` + Norms *bool `json:"norms,omitempty"` + NullValue *string `json:"null_value,omitempty"` + OnScriptError *onscripterror.OnScriptError `json:"on_script_error,omitempty"` + Properties map[string]Property `json:"properties,omitempty"` + Script Script `json:"script,omitempty"` + Similarity *string `json:"similarity,omitempty"` + SplitQueriesOnWhitespace *bool `json:"split_queries_on_whitespace,omitempty"` + Store *bool `json:"store,omitempty"` // TimeSeriesDimension For internal use by Elastic only. Marks the field as a time series dimension. // Defaults to false. 
TimeSeriesDimension *bool `json:"time_series_dimension,omitempty"` @@ -76,7 +79,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +111,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -127,7 +130,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { } case "eager_global_ordinals": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -147,7 +150,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -168,7 +171,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -258,12 +261,6 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -282,6 +279,18 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -438,6 +447,12 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -449,7 +464,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -464,7 +479,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -503,7 +518,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { s.Normalizer = &o case "norms": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -528,6 +543,11 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { } s.NullValue = &o + case "on_script_error": + if err := dec.Decode(&s.OnScriptError); err != nil { + return fmt.Errorf("%s | %w", "OnScriptError", err) + } + case "properties": if s.Properties == nil { s.Properties = make(map[string]Property, 0) @@ -535,7 +555,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) 
+ kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -556,7 +576,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -646,12 +666,6 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -670,6 +684,18 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -826,6 +852,12 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -835,6 +867,42 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { } } + case "script": + message := json.RawMessage{} + if err := dec.Decode(&message); err != nil { + return fmt.Errorf("%s | %w", "Script", err) + } + keyDec := json.NewDecoder(bytes.NewReader(message)) + for { + t, err := keyDec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return fmt.Errorf("%s | %w", "Script", err) + } + + switch t { + + case "lang", "options", "source": + o := NewInlineScript() + localDec := json.NewDecoder(bytes.NewReader(message)) + if err := localDec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Script", err) + } + s.Script = o + + case "id": + o := NewStoredScriptId() + localDec := json.NewDecoder(bytes.NewReader(message)) + if err := localDec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Script", err) + } + s.Script = o + + } + } + case "similarity": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { @@ -848,7 +916,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "split_queries_on_whitespace": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -862,7 +930,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { } case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -876,7 +944,7 @@ func (s *KeywordProperty) UnmarshalJSON(data []byte) error { } case "time_series_dimension": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -916,7 +984,9 @@ func (s KeywordProperty) MarshalJSON() ([]byte, error) { Normalizer: s.Normalizer, Norms: s.Norms, NullValue: s.NullValue, + OnScriptError: s.OnScriptError, Properties: s.Properties, + Script: s.Script, Similarity: s.Similarity, SplitQueriesOnWhitespace: s.SplitQueriesOnWhitespace, Store: s.Store, diff --git a/typedapi/types/keywordtokenizer.go 
b/typedapi/types/keywordtokenizer.go index 4b511b022b..dc85a5d033 100644 --- a/typedapi/types/keywordtokenizer.go +++ b/typedapi/types/keywordtokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // KeywordTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L62-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L62-L65 type KeywordTokenizer struct { BufferSize int `json:"buffer_size"` Type string `json:"type,omitempty"` @@ -55,7 +55,7 @@ func (s *KeywordTokenizer) UnmarshalJSON(data []byte) error { case "buffer_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/kibanatoken.go b/typedapi/types/kibanatoken.go index 01deb87202..7aa8a30e09 100644 --- a/typedapi/types/kibanatoken.go +++ b/typedapi/types/kibanatoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // KibanaToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/enroll_kibana/Response.ts#L27-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/enroll_kibana/Response.ts#L27-L30 type KibanaToken struct { Name string `json:"name"` Value string `json:"value"` diff --git a/typedapi/types/knnquery.go b/typedapi/types/knnquery.go index d3b757dd16..8a08446fe6 100644 --- a/typedapi/types/knnquery.go +++ b/typedapi/types/knnquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,20 +31,21 @@ import ( // KnnQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Knn.ts#L27-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Knn.ts#L54-L67 type KnnQuery struct { - // Boost Boost value to apply to kNN scores + // Boost Floating point number used to decrease or increase the relevance scores of + // the query. + // Boost values are relative to the default value of 1.0. + // A boost value between 0 and 1.0 decreases the relevance score. + // A value greater than 1.0 increases the relevance score. 
Boost *float32 `json:"boost,omitempty"` // Field The name of the vector field to search against Field string `json:"field"` // Filter Filters for the kNN search query Filter []Query `json:"filter,omitempty"` - // InnerHits If defined, each search hit will contain inner hits. - InnerHits *InnerHits `json:"inner_hits,omitempty"` - // K The final number of nearest neighbors to return as top hits - K int64 `json:"k"` // NumCandidates The number of nearest neighbor candidates to consider per shard - NumCandidates int64 `json:"num_candidates"` + NumCandidates *int `json:"num_candidates,omitempty"` + QueryName_ *string `json:"_name,omitempty"` // QueryVector The query vector QueryVector []float32 `json:"query_vector,omitempty"` // QueryVectorBuilder The query vector builder. You must provide a query_vector_builder or @@ -70,7 +71,7 @@ func (s *KnnQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,40 +107,33 @@ func (s *KnnQuery) UnmarshalJSON(data []byte) error { } } - case "inner_hits": - if err := dec.Decode(&s.InnerHits); err != nil { - return fmt.Errorf("%s | %w", "InnerHits", err) - } + case "num_candidates": - case "k": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.ParseInt(v, 10, 64) + value, err := strconv.Atoi(v) if err != nil { - return fmt.Errorf("%s | %w", "K", err) + return fmt.Errorf("%s | %w", "NumCandidates", err) } - s.K = value + s.NumCandidates = &value case float64: - f := int64(v) - s.K = f + f := int(v) + s.NumCandidates = &f } - case "num_candidates": - var tmp interface{} - dec.Decode(&tmp) - switch v := tmp.(type) { - case string: - value, err := strconv.ParseInt(v, 10, 64) - if err != nil { - return fmt.Errorf("%s | %w", "NumCandidates", err) - } - s.NumCandidates = value - case float64: - f := int64(v) - s.NumCandidates = f + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "QueryName_", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) } + s.QueryName_ = &o case "query_vector": if err := dec.Decode(&s.QueryVector); err != nil { @@ -152,7 +146,7 @@ func (s *KnnQuery) UnmarshalJSON(data []byte) error { } case "similarity": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/knnretriever.go b/typedapi/types/knnretriever.go new file mode 100644 index 0000000000..97d473ecaa --- /dev/null +++ b/typedapi/types/knnretriever.go @@ -0,0 +1,165 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. 
+// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// KnnRetriever type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Retriever.ts#L58-L71 +type KnnRetriever struct { + // Field The name of the vector field to search against. + Field string `json:"field"` + // Filter Query to filter the documents that can match. + Filter []Query `json:"filter,omitempty"` + // K Number of nearest neighbors to return as top hits. + K int `json:"k"` + // NumCandidates Number of nearest neighbor candidates to consider per shard. + NumCandidates int `json:"num_candidates"` + // QueryVector Query vector. Must have the same number of dimensions as the vector field you + // are searching against. You must provide a query_vector_builder or + // query_vector, but not both. + QueryVector []float32 `json:"query_vector,omitempty"` + // QueryVectorBuilder Defines a model to build a query vector. + QueryVectorBuilder *QueryVectorBuilder `json:"query_vector_builder,omitempty"` + // Similarity The minimum similarity required for a document to be considered a match. + Similarity *float32 `json:"similarity,omitempty"` +} + +func (s *KnnRetriever) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Field", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Field = o + + case "filter": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewQuery() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Filter", err) + } + + s.Filter = append(s.Filter, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Filter); err != nil { + return fmt.Errorf("%s | %w", "Filter", err) + } + } + + case "k": + + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "K", err) + } + s.K = value + case float64: + f := int(v) + s.K = f + } + + case "num_candidates": + + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "NumCandidates", err) + } + s.NumCandidates = value + case float64: + f := int(v) + s.NumCandidates = f + } + + case "query_vector": + if err := dec.Decode(&s.QueryVector); err != nil { + return fmt.Errorf("%s | %w", "QueryVector", err) + } + + case "query_vector_builder": + if err := dec.Decode(&s.QueryVectorBuilder); err != nil { + return fmt.Errorf("%s | %w", "QueryVectorBuilder", err) + } + + case "similarity": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "Similarity", err) + } + f := float32(value) + s.Similarity = &f + case float64: + f := float32(v) + s.Similarity = &f + } + + } + } + return nil +} + +// NewKnnRetriever returns a KnnRetriever. 
+func NewKnnRetriever() *KnnRetriever { + r := &KnnRetriever{} + + return r +} diff --git a/typedapi/types/knnsearch.go b/typedapi/types/knnsearch.go new file mode 100644 index 0000000000..ce432836be --- /dev/null +++ b/typedapi/types/knnsearch.go @@ -0,0 +1,182 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// KnnSearch type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Knn.ts#L30-L52 +type KnnSearch struct { + // Boost Boost value to apply to kNN scores + Boost *float32 `json:"boost,omitempty"` + // Field The name of the vector field to search against + Field string `json:"field"` + // Filter Filters for the kNN search query + Filter []Query `json:"filter,omitempty"` + // InnerHits If defined, each search hit will contain inner hits. + InnerHits *InnerHits `json:"inner_hits,omitempty"` + // K The final number of nearest neighbors to return as top hits + K *int `json:"k,omitempty"` + // NumCandidates The number of nearest neighbor candidates to consider per shard + NumCandidates *int `json:"num_candidates,omitempty"` + // QueryVector The query vector + QueryVector []float32 `json:"query_vector,omitempty"` + // QueryVectorBuilder The query vector builder. You must provide a query_vector_builder or + // query_vector, but not both. 
+ QueryVectorBuilder *QueryVectorBuilder `json:"query_vector_builder,omitempty"` + // Similarity The minimum similarity for a vector to be considered a match + Similarity *float32 `json:"similarity,omitempty"` +} + +func (s *KnnSearch) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "Boost", err) + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return fmt.Errorf("%s | %w", "Field", err) + } + + case "filter": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewQuery() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Filter", err) + } + + s.Filter = append(s.Filter, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Filter); err != nil { + return fmt.Errorf("%s | %w", "Filter", err) + } + } + + case "inner_hits": + if err := dec.Decode(&s.InnerHits); err != nil { + return fmt.Errorf("%s | %w", "InnerHits", err) + } + + case "k": + + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "K", err) + } + s.K = &value + case float64: + f := int(v) + s.K = &f + } + + case "num_candidates": + + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "NumCandidates", err) + } + s.NumCandidates = &value + case float64: + f := int(v) + s.NumCandidates = &f + } + + case "query_vector": + if err := dec.Decode(&s.QueryVector); err != nil { + return fmt.Errorf("%s | %w", "QueryVector", err) + } + + case "query_vector_builder": + if err := dec.Decode(&s.QueryVectorBuilder); err != nil { + return fmt.Errorf("%s | %w", "QueryVectorBuilder", err) + } + + case "similarity": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "Similarity", err) + } + f := float32(value) + s.Similarity = &f + case float64: + f := float32(v) + s.Similarity = &f + } + + } + } + return nil +} + +// NewKnnSearch returns a KnnSearch. +func NewKnnSearch() *KnnSearch { + r := &KnnSearch{} + + return r +} diff --git a/typedapi/types/kstemtokenfilter.go b/typedapi/types/kstemtokenfilter.go index 4aff876788..5e8f15ff3b 100644 --- a/typedapi/types/kstemtokenfilter.go +++ b/typedapi/types/kstemtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // KStemTokenFilter type. 
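Taken together, the three kNN shapes above now split cleanly: KnnQuery (the knn clause inside the query DSL) loses k and inner_hits, makes num_candidates an optional *int, and gains _name; the new KnnSearch preserves the classic top-level search option with optional k/num_candidates and inner_hits; and the new KnnRetriever is the retriever variant with required k and num_candidates. A hedged construction sketch using only fields visible in these hunks (the "embedding" field name, vector values, and local pointer helpers are illustrative):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// Small local pointer helpers to keep the sketch self-contained.
func intPtr(v int) *int       { return &v }
func strPtr(v string) *string { return &v }

func main() {
	vector := []float32{0.12, 0.43, 0.91}

	// knn as a query-DSL clause: k and inner_hits are gone, num_candidates is optional, _name is new.
	clause := types.KnnQuery{
		Field:         "embedding",
		QueryVector:   vector,
		NumCandidates: intPtr(100),
		QueryName_:    strPtr("knn_clause"),
	}

	// knn as the classic top-level search option: k and num_candidates are now optional, inner_hits stays.
	topLevel := types.KnnSearch{
		Field:         "embedding",
		QueryVector:   vector,
		K:             intPtr(10),
		NumCandidates: intPtr(100),
	}

	// knn as a retriever: k and num_candidates are required plain ints.
	retriever := types.KnnRetriever{
		Field:         "embedding",
		QueryVector:   vector,
		K:             10,
		NumCandidates: 100,
	}

	for _, v := range []any{clause, topLevel, retriever} {
		b, _ := json.Marshal(v)
		fmt.Println(string(b))
	}
}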
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L239-L241 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L241-L243 type KStemTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` diff --git a/typedapi/types/kuromojianalyzer.go b/typedapi/types/kuromojianalyzer.go index 34f7f13819..e5ae06e6e0 100644 --- a/typedapi/types/kuromojianalyzer.go +++ b/typedapi/types/kuromojianalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // KuromojiAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/kuromoji-plugin.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/kuromoji-plugin.ts#L25-L29 type KuromojiAnalyzer struct { Mode kuromojitokenizationmode.KuromojiTokenizationMode `json:"mode"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/kuromojiiterationmarkcharfilter.go b/typedapi/types/kuromojiiterationmarkcharfilter.go index f43d4c1d9f..14b4d6201c 100644 --- a/typedapi/types/kuromojiiterationmarkcharfilter.go +++ b/typedapi/types/kuromojiiterationmarkcharfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // KuromojiIterationMarkCharFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/kuromoji-plugin.ts#L31-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/kuromoji-plugin.ts#L31-L35 type KuromojiIterationMarkCharFilter struct { NormalizeKana bool `json:"normalize_kana"` NormalizeKanji bool `json:"normalize_kanji"` @@ -55,7 +55,7 @@ func (s *KuromojiIterationMarkCharFilter) UnmarshalJSON(data []byte) error { switch t { case "normalize_kana": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -69,7 +69,7 @@ func (s *KuromojiIterationMarkCharFilter) UnmarshalJSON(data []byte) error { } case "normalize_kanji": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/kuromojipartofspeechtokenfilter.go b/typedapi/types/kuromojipartofspeechtokenfilter.go index cb97da2ac5..4cea289e26 100644 --- a/typedapi/types/kuromojipartofspeechtokenfilter.go +++ b/typedapi/types/kuromojipartofspeechtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // KuromojiPartOfSpeechTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/kuromoji-plugin.ts#L37-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/kuromoji-plugin.ts#L37-L40 type KuromojiPartOfSpeechTokenFilter struct { Stoptags []string `json:"stoptags"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/kuromojireadingformtokenfilter.go b/typedapi/types/kuromojireadingformtokenfilter.go index b61479d4b0..0af588bd3d 100644 --- a/typedapi/types/kuromojireadingformtokenfilter.go +++ b/typedapi/types/kuromojireadingformtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // KuromojiReadingFormTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/kuromoji-plugin.ts#L42-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/kuromoji-plugin.ts#L42-L45 type KuromojiReadingFormTokenFilter struct { Type string `json:"type,omitempty"` UseRomaji bool `json:"use_romaji"` @@ -59,7 +59,7 @@ func (s *KuromojiReadingFormTokenFilter) UnmarshalJSON(data []byte) error { } case "use_romaji": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/kuromojistemmertokenfilter.go b/typedapi/types/kuromojistemmertokenfilter.go index 5ec64e39ec..637d99285c 100644 --- a/typedapi/types/kuromojistemmertokenfilter.go +++ b/typedapi/types/kuromojistemmertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // KuromojiStemmerTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/kuromoji-plugin.ts#L47-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/kuromoji-plugin.ts#L47-L50 type KuromojiStemmerTokenFilter struct { MinimumLength int `json:"minimum_length"` Type string `json:"type,omitempty"` @@ -55,7 +55,7 @@ func (s *KuromojiStemmerTokenFilter) UnmarshalJSON(data []byte) error { case "minimum_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/kuromojitokenizer.go b/typedapi/types/kuromojitokenizer.go index 89126065f5..df26203ab0 100644 --- a/typedapi/types/kuromojitokenizer.go +++ b/typedapi/types/kuromojitokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // KuromojiTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/kuromoji-plugin.ts#L58-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/kuromoji-plugin.ts#L58-L67 type KuromojiTokenizer struct { DiscardCompoundToken *bool `json:"discard_compound_token,omitempty"` DiscardPunctuation *bool `json:"discard_punctuation,omitempty"` @@ -62,7 +62,7 @@ func (s *KuromojiTokenizer) UnmarshalJSON(data []byte) error { switch t { case "discard_compound_token": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *KuromojiTokenizer) UnmarshalJSON(data []byte) error { } case "discard_punctuation": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *KuromojiTokenizer) UnmarshalJSON(data []byte) error { case "nbest_cost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/languageanalyzer.go b/typedapi/types/languageanalyzer.go index 2eaddf7dd2..290728e94d 100644 --- a/typedapi/types/languageanalyzer.go +++ b/typedapi/types/languageanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // LanguageAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L52-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L52-L59 type LanguageAnalyzer struct { Language language.Language `json:"language"` StemExclusion []string `json:"stem_exclusion"` diff --git a/typedapi/types/languagecontext.go b/typedapi/types/languagecontext.go index 2dd2e59851..05e188683f 100644 --- a/typedapi/types/languagecontext.go +++ b/typedapi/types/languagecontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // LanguageContext type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/get_script_languages/types.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/get_script_languages/types.ts#L22-L25 type LanguageContext struct { Contexts []string `json:"contexts"` Language scriptlanguage.ScriptLanguage `json:"language"` diff --git a/typedapi/types/laplacesmoothingmodel.go b/typedapi/types/laplacesmoothingmodel.go index dd3e7e879c..68e61f9b1e 100644 --- a/typedapi/types/laplacesmoothingmodel.go +++ b/typedapi/types/laplacesmoothingmodel.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // LaplaceSmoothingModel type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L427-L432 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L430-L435 type LaplaceSmoothingModel struct { // Alpha A constant that is added to all counts to balance weights. Alpha Float64 `json:"alpha"` @@ -53,7 +53,7 @@ func (s *LaplaceSmoothingModel) UnmarshalJSON(data []byte) error { switch t { case "alpha": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/latest.go b/typedapi/types/latest.go index 9a3322fb8d..e9afce375e 100644 --- a/typedapi/types/latest.go +++ b/typedapi/types/latest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Latest type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/_types/Transform.ts#L47-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/_types/Transform.ts#L47-L52 type Latest struct { // Sort Specifies the date field that is used to identify the latest documents. Sort string `json:"sort"` diff --git a/typedapi/types/latlongeolocation.go b/typedapi/types/latlongeolocation.go index 40740767ca..fa411afd8e 100644 --- a/typedapi/types/latlongeolocation.go +++ b/typedapi/types/latlongeolocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // LatLonGeoLocation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L120-L129 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L120-L129 type LatLonGeoLocation struct { // Lat Latitude Lat Float64 `json:"lat"` @@ -55,7 +55,7 @@ func (s *LatLonGeoLocation) UnmarshalJSON(data []byte) error { switch t { case "lat": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *LatLonGeoLocation) UnmarshalJSON(data []byte) error { } case "lon": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/learningtorank.go b/typedapi/types/learningtorank.go new file mode 100644 index 0000000000..f93b7607bd --- /dev/null +++ b/typedapi/types/learningtorank.go @@ -0,0 +1,89 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// LearningToRank type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/rescoring.ts#L88-L97 +type LearningToRank struct { + // ModelId The unique identifier of the trained model uploaded to Elasticsearch + ModelId string `json:"model_id"` + // Params Named parameters to be passed to the query templates used for feature + Params map[string]json.RawMessage `json:"params,omitempty"` +} + +func (s *LearningToRank) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "model_id": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "ModelId", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.ModelId = o + + case "params": + if s.Params == nil { + s.Params = make(map[string]json.RawMessage, 0) + } + if err := dec.Decode(&s.Params); err != nil { + return fmt.Errorf("%s | %w", "Params", err) + } + + } + } + return nil +} + +// NewLearningToRank returns a LearningToRank. +func NewLearningToRank() *LearningToRank { + r := &LearningToRank{ + Params: make(map[string]json.RawMessage, 0), + } + + return r +} diff --git a/typedapi/types/lengthtokenfilter.go b/typedapi/types/lengthtokenfilter.go index c2554fee00..154a678421 100644 --- a/typedapi/types/lengthtokenfilter.go +++ b/typedapi/types/lengthtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // LengthTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L243-L247 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L245-L249 type LengthTokenFilter struct { Max *int `json:"max,omitempty"` Min *int `json:"min,omitempty"` @@ -56,7 +56,7 @@ func (s *LengthTokenFilter) UnmarshalJSON(data []byte) error { case "max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *LengthTokenFilter) UnmarshalJSON(data []byte) error { case "min": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/lettertokenizer.go b/typedapi/types/lettertokenizer.go index 8cbb9bb3fb..c8e5d522ee 100644 --- a/typedapi/types/lettertokenizer.go +++ b/typedapi/types/lettertokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // LetterTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L67-L69 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L67-L69 type LetterTokenizer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` diff --git a/typedapi/types/license.go b/typedapi/types/license.go index 48577c642d..b0f6a8b93f 100644 --- a/typedapi/types/license.go +++ b/typedapi/types/license.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,13 +33,13 @@ import ( // License type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/_types/License.ts#L42-L53 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/_types/License.ts#L42-L53 type License struct { ExpiryDateInMillis int64 `json:"expiry_date_in_millis"` IssueDateInMillis int64 `json:"issue_date_in_millis"` IssuedTo string `json:"issued_to"` Issuer string `json:"issuer"` - MaxNodes int64 `json:"max_nodes,omitempty"` + MaxNodes *int64 `json:"max_nodes,omitempty"` MaxResourceUnits *int64 `json:"max_resource_units,omitempty"` Signature string `json:"signature"` StartDateInMillis *int64 `json:"start_date_in_millis,omitempty"` @@ -102,7 +102,7 @@ func (s *License) UnmarshalJSON(data []byte) error { } case "max_resource_units": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/licenseinformation.go b/typedapi/types/licenseinformation.go index 18b001e9d1..4f2007f25e 100644 --- a/typedapi/types/licenseinformation.go +++ b/typedapi/types/licenseinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // LicenseInformation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/license/get/types.ts#L25-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/license/get/types.ts#L25-L38 type LicenseInformation struct { ExpiryDate DateTime `json:"expiry_date,omitempty"` ExpiryDateInMillis *int64 `json:"expiry_date_in_millis,omitempty"` @@ -42,8 +42,8 @@ type LicenseInformation struct { IssueDateInMillis int64 `json:"issue_date_in_millis"` IssuedTo string `json:"issued_to"` Issuer string `json:"issuer"` - MaxNodes int64 `json:"max_nodes,omitempty"` - MaxResourceUnits int `json:"max_resource_units,omitempty"` + MaxNodes *int64 `json:"max_nodes,omitempty"` + MaxResourceUnits *int `json:"max_resource_units,omitempty"` StartDateInMillis int64 `json:"start_date_in_millis"` Status licensestatus.LicenseStatus `json:"status"` Type licensetype.LicenseType `json:"type"` diff --git a/typedapi/types/lifecycle.go b/typedapi/types/lifecycle.go index 851b7481fd..f4422f8e4a 100644 --- a/typedapi/types/lifecycle.go +++ b/typedapi/types/lifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Lifecycle type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/get_lifecycle/types.ts#L24-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/get_lifecycle/types.ts#L24-L28 type Lifecycle struct { ModifiedDate DateTime `json:"modified_date"` Policy IlmPolicy `json:"policy"` diff --git a/typedapi/types/lifecycleexplain.go b/typedapi/types/lifecycleexplain.go index 4cc7b9841e..20f96bd777 100644 --- a/typedapi/types/lifecycleexplain.go +++ b/typedapi/types/lifecycleexplain.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // LifecycleExplainManaged // LifecycleExplainUnmanaged // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/explain_lifecycle/types.ts#L59-L62 -type LifecycleExplain interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/explain_lifecycle/types.ts#L59-L62 +type LifecycleExplain any diff --git a/typedapi/types/lifecycleexplainmanaged.go b/typedapi/types/lifecycleexplainmanaged.go index b1ecfa1a68..d26fc982c8 100644 --- a/typedapi/types/lifecycleexplainmanaged.go +++ b/typedapi/types/lifecycleexplainmanaged.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // LifecycleExplainManaged type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/explain_lifecycle/types.ts#L26-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/explain_lifecycle/types.ts#L26-L52 type LifecycleExplainManaged struct { Action *string `json:"action,omitempty"` ActionTime DateTime `json:"action_time,omitempty"` @@ -100,7 +100,7 @@ func (s *LifecycleExplainManaged) UnmarshalJSON(data []byte) error { case "failed_step_retry_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -130,7 +130,7 @@ func (s *LifecycleExplainManaged) UnmarshalJSON(data []byte) error { } case "is_auto_retryable_error": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/lifecycleexplainphaseexecution.go b/typedapi/types/lifecycleexplainphaseexecution.go index 764686c8b0..375a615c11 100644 --- a/typedapi/types/lifecycleexplainphaseexecution.go +++ b/typedapi/types/lifecycleexplainphaseexecution.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // LifecycleExplainPhaseExecution type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/explain_lifecycle/types.ts#L64-L68 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/explain_lifecycle/types.ts#L64-L68 type LifecycleExplainPhaseExecution struct { ModifiedDateInMillis int64 `json:"modified_date_in_millis"` Policy string `json:"policy"` diff --git a/typedapi/types/lifecycleexplainunmanaged.go b/typedapi/types/lifecycleexplainunmanaged.go index 76169e57c8..ff7a10167b 100644 --- a/typedapi/types/lifecycleexplainunmanaged.go +++ b/typedapi/types/lifecycleexplainunmanaged.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // LifecycleExplainUnmanaged type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/explain_lifecycle/types.ts#L54-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/explain_lifecycle/types.ts#L54-L57 type LifecycleExplainUnmanaged struct { Index string `json:"index"` Managed bool `json:"managed,omitempty"` diff --git a/typedapi/types/like.go b/typedapi/types/like.go index 6588082313..caceadfced 100644 --- a/typedapi/types/like.go +++ b/typedapi/types/like.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // LikeDocument // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L186-L191 -type Like interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L195-L200 +type Like any diff --git a/typedapi/types/likedocument.go b/typedapi/types/likedocument.go index 3b708f648d..8823a8b3fa 100644 --- a/typedapi/types/likedocument.go +++ b/typedapi/types/likedocument.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // LikeDocument type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L165-L184 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L171-L193 type LikeDocument struct { // Doc A document not present in the index. Doc json.RawMessage `json:"doc,omitempty"` @@ -40,7 +40,8 @@ type LikeDocument struct { // Id_ ID of a document. Id_ *string `json:"_id,omitempty"` // Index_ Index of a document. - Index_ *string `json:"_index,omitempty"` + Index_ *string `json:"_index,omitempty"` + // PerFieldAnalyzer Overrides the default analyzer. PerFieldAnalyzer map[string]string `json:"per_field_analyzer,omitempty"` Routing *string `json:"routing,omitempty"` Version *int64 `json:"version,omitempty"` diff --git a/typedapi/types/limits.go b/typedapi/types/limits.go index 862f2a69df..9aedd6567a 100644 --- a/typedapi/types/limits.go +++ b/typedapi/types/limits.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Limits type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/info/types.ts#L34-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/info/types.ts#L34-L38 type Limits struct { EffectiveMaxModelMemoryLimit string `json:"effective_max_model_memory_limit"` MaxModelMemoryLimit *string `json:"max_model_memory_limit,omitempty"` diff --git a/typedapi/types/limittokencounttokenfilter.go b/typedapi/types/limittokencounttokenfilter.go index 1d0af8f64a..910e82134a 100644 --- a/typedapi/types/limittokencounttokenfilter.go +++ b/typedapi/types/limittokencounttokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // LimitTokenCountTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L249-L253 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L251-L255 type LimitTokenCountTokenFilter struct { ConsumeAllTokens *bool `json:"consume_all_tokens,omitempty"` MaxTokenCount Stringifiedinteger `json:"max_token_count,omitempty"` @@ -55,7 +55,7 @@ func (s *LimitTokenCountTokenFilter) UnmarshalJSON(data []byte) error { switch t { case "consume_all_tokens": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/linearinterpolationsmoothingmodel.go b/typedapi/types/linearinterpolationsmoothingmodel.go index cb71fcbe04..6cc7c038f7 100644 --- a/typedapi/types/linearinterpolationsmoothingmodel.go +++ b/typedapi/types/linearinterpolationsmoothingmodel.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // LinearInterpolationSmoothingModel type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L434-L438 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L437-L441 type LinearInterpolationSmoothingModel struct { BigramLambda Float64 `json:"bigram_lambda"` TrigramLambda Float64 `json:"trigram_lambda"` @@ -54,7 +54,7 @@ func (s *LinearInterpolationSmoothingModel) UnmarshalJSON(data []byte) error { switch t { case "bigram_lambda": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *LinearInterpolationSmoothingModel) UnmarshalJSON(data []byte) error { } case "trigram_lambda": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -86,7 +86,7 @@ func (s *LinearInterpolationSmoothingModel) UnmarshalJSON(data []byte) error { } case "unigram_lambda": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/linearmovingaverageaggregation.go b/typedapi/types/linearmovingaverageaggregation.go index f487258b20..a39d0b22b3 100644 --- a/typedapi/types/linearmovingaverageaggregation.go +++ b/typedapi/types/linearmovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // LinearMovingAverageAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L242-L245 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L242-L245 type LinearMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,10 +43,8 @@ type LinearMovingAverageAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` Minimize *bool `json:"minimize,omitempty"` Model string `json:"model,omitempty"` - Name *string `json:"name,omitempty"` Predict *int `json:"predict,omitempty"` Settings EmptyObject `json:"settings"` Window *int `json:"window,omitempty"` @@ -89,13 +87,8 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "minimize": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,21 +106,9 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Model", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "predict": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -148,7 +129,7 @@ func (s *LinearMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "window": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -174,10 +155,8 @@ func (s LinearMovingAverageAggregation) MarshalJSON() ([]byte, error) { BucketsPath: s.BucketsPath, Format: s.Format, GapPolicy: s.GapPolicy, - Meta: s.Meta, Minimize: s.Minimize, Model: s.Model, - Name: s.Name, Predict: s.Predict, Settings: s.Settings, Window: s.Window, diff --git a/typedapi/types/loggingaction.go b/typedapi/types/loggingaction.go index d70a2c0e5d..13e0bef50b 100644 --- a/typedapi/types/loggingaction.go +++ b/typedapi/types/loggingaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // LoggingAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L281-L285 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L281-L285 type LoggingAction struct { Category *string `json:"category,omitempty"` Level *string `json:"level,omitempty"` diff --git a/typedapi/types/loggingresult.go b/typedapi/types/loggingresult.go index a2622f1dd8..44f999c95a 100644 --- a/typedapi/types/loggingresult.go +++ b/typedapi/types/loggingresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // LoggingResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L287-L289 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L287-L289 type LoggingResult struct { LoggedText string `json:"logged_text"` } diff --git a/typedapi/types/logstashpipeline.go b/typedapi/types/logstashpipeline.go index 460ad93806..9ffc18bba3 100644 --- a/typedapi/types/logstashpipeline.go +++ b/typedapi/types/logstashpipeline.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // LogstashPipeline type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/logstash/_types/Pipeline.ts#L60-L92 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/logstash/_types/Pipeline.ts#L60-L92 type LogstashPipeline struct { // Description Description of the pipeline. // This description is not used by Elasticsearch or Logstash. diff --git a/typedapi/types/longnumberproperty.go b/typedapi/types/longnumberproperty.go index c34ecf8a33..ddfc84abdf 100644 --- a/typedapi/types/longnumberproperty.go +++ b/typedapi/types/longnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // LongNumberProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L154-L157 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L162-L165 type LongNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -79,7 +79,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -171,7 +171,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -261,12 +261,6 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -285,6 +279,18 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -441,6 +447,12 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -452,7 +464,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -467,7 +479,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -481,7 +493,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -503,7 +515,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { } case "null_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) 
{ case string: @@ -529,7 +541,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -550,7 +562,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -640,12 +652,6 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -664,6 +670,18 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -820,6 +838,12 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -878,7 +902,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -892,7 +916,7 @@ func (s *LongNumberProperty) UnmarshalJSON(data []byte) error { } case "time_series_dimension": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/longrangeproperty.go b/typedapi/types/longrangeproperty.go index 911c81ce0b..09a43740dc 100644 --- a/typedapi/types/longrangeproperty.go +++ b/typedapi/types/longrangeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // LongRangeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/range.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/range.ts#L50-L52 type LongRangeProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -67,7 +67,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -159,7 +159,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -249,12 +249,6 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -273,6 +267,18 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -429,6 +435,12 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -440,7 +452,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -455,7 +467,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -483,7 +495,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -504,7 +516,7 @@ func (s *LongRangeProperty) 
UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -594,12 +606,6 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -618,6 +624,18 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -774,6 +792,12 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -796,7 +820,7 @@ func (s *LongRangeProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/longraretermsaggregate.go b/typedapi/types/longraretermsaggregate.go index a8a13fbc07..8753afa124 100644 --- a/typedapi/types/longraretermsaggregate.go +++ b/typedapi/types/longraretermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // LongRareTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L431-L436 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L433-L438 type LongRareTermsAggregate struct { Buckets BucketsLongRareTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/longraretermsbucket.go b/typedapi/types/longraretermsbucket.go index 34c757e43d..71b0b14b3a 100644 --- a/typedapi/types/longraretermsbucket.go +++ b/typedapi/types/longraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // LongRareTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L438-L441 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L440-L443 type LongRareTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -56,7 +56,7 @@ func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { } case "key": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -542,7 +542,7 @@ func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -592,7 +592,7 @@ func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -602,7 +602,7 @@ func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -619,7 +619,7 @@ func (s *LongRareTermsBucket) UnmarshalJSON(data []byte) error { func (s LongRareTermsBucket) MarshalJSON() ([]byte, error) { type opt LongRareTermsBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/longtermsaggregate.go b/typedapi/types/longtermsaggregate.go index 67fec21d90..9533562e4d 100644 --- a/typedapi/types/longtermsaggregate.go +++ b/typedapi/types/longtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // LongTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L399-L404 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L401-L406 type LongTermsAggregate struct { Buckets BucketsLongTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -76,7 +76,7 @@ func (s *LongTermsAggregate) UnmarshalJSON(data []byte) error { } case "doc_count_error_upper_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *LongTermsAggregate) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/longtermsbucket.go b/typedapi/types/longtermsbucket.go index 311a9cbcd8..5e4a63c189 100644 --- a/typedapi/types/longtermsbucket.go +++ b/typedapi/types/longtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,13 +32,13 @@ import ( // LongTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L406-L409 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L408-L411 type LongTermsBucket struct { - Aggregations map[string]Aggregate `json:"-"` - DocCount int64 `json:"doc_count"` - DocCountError *int64 `json:"doc_count_error,omitempty"` - Key int64 `json:"key"` - KeyAsString *string `json:"key_as_string,omitempty"` + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Key int64 `json:"key"` + KeyAsString *string `json:"key_as_string,omitempty"` } func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { @@ -57,7 +57,7 @@ func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,23 +71,23 @@ func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { s.DocCount = f } - case "doc_count_error": - var tmp interface{} + case "doc_count_error_upper_bound": + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return fmt.Errorf("%s | %w", "DocCountError", err) + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } - s.DocCountError = &value + s.DocCountErrorUpperBound = &value case float64: f := int64(v) - s.DocCountError = &f + s.DocCountErrorUpperBound = &f } case "key": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -558,7 +558,7 @@ func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -608,7 +608,7 @@ func (s 
*LongTermsBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -618,7 +618,7 @@ func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -635,7 +635,7 @@ func (s *LongTermsBucket) UnmarshalJSON(data []byte) error { func (s LongTermsBucket) MarshalJSON() ([]byte, error) { type opt LongTermsBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/lowercasenormalizer.go b/typedapi/types/lowercasenormalizer.go index d80dacc732..c89cde8ac3 100644 --- a/typedapi/types/lowercasenormalizer.go +++ b/typedapi/types/lowercasenormalizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // LowercaseNormalizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/normalizers.ts#L26-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/normalizers.ts#L26-L28 type LowercaseNormalizer struct { Type string `json:"type,omitempty"` } diff --git a/typedapi/types/lowercaseprocessor.go b/typedapi/types/lowercaseprocessor.go index c716b26c2b..39c9daeaad 100644 --- a/typedapi/types/lowercaseprocessor.go +++ b/typedapi/types/lowercaseprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // LowercaseProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L910-L926 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L910-L926 type LowercaseProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -100,7 +100,7 @@ func (s *LowercaseProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *LowercaseProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/lowercasetokenfilter.go b/typedapi/types/lowercasetokenfilter.go index e74c0e4d01..6d46e9f8a6 100644 --- a/typedapi/types/lowercasetokenfilter.go +++ b/typedapi/types/lowercasetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // LowercaseTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L255-L258 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L257-L260 type LowercaseTokenFilter struct { Language *string `json:"language,omitempty"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/lowercasetokenizer.go b/typedapi/types/lowercasetokenizer.go index df4e007990..6bae4e9663 100644 --- a/typedapi/types/lowercasetokenizer.go +++ b/typedapi/types/lowercasetokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // LowercaseTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L71-L73 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L71-L73 type LowercaseTokenizer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` diff --git a/typedapi/types/machinelearning.go b/typedapi/types/machinelearning.go index d1353cb60b..d460b7c074 100644 --- a/typedapi/types/machinelearning.go +++ b/typedapi/types/machinelearning.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MachineLearning type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L372-L379 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L372-L379 type MachineLearning struct { Available bool `json:"available"` DataFrameAnalyticsJobs MlDataFrameAnalyticsJobs `json:"data_frame_analytics_jobs"` @@ -60,7 +60,7 @@ func (s *MachineLearning) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *MachineLearning) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *MachineLearning) UnmarshalJSON(data []byte) error { case "node_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/manageuserprivileges.go b/typedapi/types/manageuserprivileges.go index 41608047ec..d0fc69b3fb 100644 --- a/typedapi/types/manageuserprivileges.go +++ b/typedapi/types/manageuserprivileges.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ManageUserPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L197-L199 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L337-L339 type ManageUserPrivileges struct { Applications []string `json:"applications"` } diff --git a/typedapi/types/mapboxvectortiles.go b/typedapi/types/mapboxvectortiles.go index 1582f9acd4..1782eb952d 100644 --- a/typedapi/types/mapboxvectortiles.go +++ b/typedapi/types/mapboxvectortiles.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // MapboxVectorTiles type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Binary.ts#L21-L21 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Binary.ts#L21-L21 type MapboxVectorTiles []byte diff --git a/typedapi/types/mappingcharfilter.go b/typedapi/types/mappingcharfilter.go index 347960ace1..a507022639 100644 --- a/typedapi/types/mappingcharfilter.go +++ b/typedapi/types/mappingcharfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MappingCharFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/char_filters.ts#L47-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/char_filters.ts#L48-L52 type MappingCharFilter struct { Mappings []string `json:"mappings,omitempty"` MappingsPath *string `json:"mappings_path,omitempty"` diff --git a/typedapi/types/mappinglimitsettings.go b/typedapi/types/mappinglimitsettings.go index 6fca505a82..9f3133d32c 100644 --- a/typedapi/types/mappinglimitsettings.go +++ b/typedapi/types/mappinglimitsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MappingLimitSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L409-L422 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L411-L424 type MappingLimitSettings struct { Coerce *bool `json:"coerce,omitempty"` Depth *MappingLimitSettingsDepth `json:"depth,omitempty"` @@ -59,7 +59,7 @@ func (s *MappingLimitSettings) UnmarshalJSON(data []byte) error { switch t { case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *MappingLimitSettings) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/mappinglimitsettingsdepth.go b/typedapi/types/mappinglimitsettingsdepth.go index 8b4c38a722..23f798bd10 100644 --- a/typedapi/types/mappinglimitsettingsdepth.go +++ b/typedapi/types/mappinglimitsettingsdepth.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,13 +31,13 @@ import ( // MappingLimitSettingsDepth type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L434-L441 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L445-L452 type MappingLimitSettingsDepth struct { // Limit The maximum depth for a field, which is measured as the number of inner // objects. For instance, if all fields are defined // at the root object level, then the depth is 1. If there is one object // mapping, then the depth is 2, etc. 
- Limit *int `json:"limit,omitempty"` + Limit *int64 `json:"limit,omitempty"` } func (s *MappingLimitSettingsDepth) UnmarshalJSON(data []byte) error { @@ -56,18 +56,17 @@ func (s *MappingLimitSettingsDepth) UnmarshalJSON(data []byte) error { switch t { case "limit": - - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { return fmt.Errorf("%s | %w", "Limit", err) } s.Limit = &value case float64: - f := int(v) + f := int64(v) s.Limit = &f } diff --git a/typedapi/types/mappinglimitsettingsdimensionfields.go b/typedapi/types/mappinglimitsettingsdimensionfields.go index a9cacc32b4..68b10df16c 100644 --- a/typedapi/types/mappinglimitsettingsdimensionfields.go +++ b/typedapi/types/mappinglimitsettingsdimensionfields.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,13 +31,13 @@ import ( // MappingLimitSettingsDimensionFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L471-L477 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L482-L488 type MappingLimitSettingsDimensionFields struct { // Limit [preview] This functionality is in technical preview and may be changed or // removed in a future release. // Elastic will work to fix any issues, but features in technical preview are // not subject to the support SLA of official GA features. - Limit *int `json:"limit,omitempty"` + Limit *int64 `json:"limit,omitempty"` } func (s *MappingLimitSettingsDimensionFields) UnmarshalJSON(data []byte) error { @@ -56,18 +56,17 @@ func (s *MappingLimitSettingsDimensionFields) UnmarshalJSON(data []byte) error { switch t { case "limit": - - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { return fmt.Errorf("%s | %w", "Limit", err) } s.Limit = &value case float64: - f := int(v) + f := int64(v) s.Limit = &f } diff --git a/typedapi/types/mappinglimitsettingsfieldnamelength.go b/typedapi/types/mappinglimitsettingsfieldnamelength.go index 75271d7249..b0c19fd546 100644 --- a/typedapi/types/mappinglimitsettingsfieldnamelength.go +++ b/typedapi/types/mappinglimitsettingsfieldnamelength.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MappingLimitSettingsFieldNameLength type. 
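The hunks above widen the mapping-limit `limit` fields from `*int` to `*int64` and swap `strconv.Atoi` for `strconv.ParseInt`, while keeping the decoder tolerant of both JSON numbers and quoted strings. A minimal sketch (not part of this change set) of what that tolerance looks like from the caller's side, using only the `MappingLimitSettingsDepth` type shown above:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// The regenerated UnmarshalJSON accepts the limit either as a JSON
	// number or as a quoted string; both decode to the same *int64.
	for _, payload := range []string{`{"limit": 20}`, `{"limit": "20"}`} {
		var depth types.MappingLimitSettingsDepth
		if err := json.Unmarshal([]byte(payload), &depth); err != nil {
			panic(err)
		}
		fmt.Println(*depth.Limit) // 20
	}
}

The same string-or-number handling applies to the other mapping-limit types regenerated in this diff.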
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L462-L469 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L473-L480 type MappingLimitSettingsFieldNameLength struct { // Limit Setting for the maximum length of a field name. This setting isn’t really // something that addresses mappings explosion but @@ -58,7 +58,7 @@ func (s *MappingLimitSettingsFieldNameLength) UnmarshalJSON(data []byte) error { switch t { case "limit": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/mappinglimitsettingsnestedfields.go b/typedapi/types/mappinglimitsettingsnestedfields.go index 49058dfa6b..41311af5d5 100644 --- a/typedapi/types/mappinglimitsettingsnestedfields.go +++ b/typedapi/types/mappinglimitsettingsnestedfields.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // MappingLimitSettingsNestedFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L443-L451 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L454-L462 type MappingLimitSettingsNestedFields struct { // Limit The maximum number of distinct nested mappings in an index. The nested type // should only be used in special cases, when // arrays of objects need to be queried independently of each other. To // safeguard against poorly designed mappings, this // setting limits the number of unique nested types per index. - Limit *int `json:"limit,omitempty"` + Limit *int64 `json:"limit,omitempty"` } func (s *MappingLimitSettingsNestedFields) UnmarshalJSON(data []byte) error { @@ -57,18 +57,17 @@ func (s *MappingLimitSettingsNestedFields) UnmarshalJSON(data []byte) error { switch t { case "limit": - - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { return fmt.Errorf("%s | %w", "Limit", err) } s.Limit = &value case float64: - f := int(v) + f := int64(v) s.Limit = &f } diff --git a/typedapi/types/mappinglimitsettingsnestedobjects.go b/typedapi/types/mappinglimitsettingsnestedobjects.go index 77befa19e0..dd77703f32 100644 --- a/typedapi/types/mappinglimitsettingsnestedobjects.go +++ b/typedapi/types/mappinglimitsettingsnestedobjects.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,13 +31,13 @@ import ( // MappingLimitSettingsNestedObjects type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L453-L460 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L464-L471 type MappingLimitSettingsNestedObjects struct { // Limit The maximum number of nested JSON objects that a single document can contain // across all nested types. This limit helps // to prevent out of memory errors when a document contains too many nested // objects. - Limit *int `json:"limit,omitempty"` + Limit *int64 `json:"limit,omitempty"` } func (s *MappingLimitSettingsNestedObjects) UnmarshalJSON(data []byte) error { @@ -56,18 +56,17 @@ func (s *MappingLimitSettingsNestedObjects) UnmarshalJSON(data []byte) error { switch t { case "limit": - - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { return fmt.Errorf("%s | %w", "Limit", err) } s.Limit = &value case float64: - f := int(v) + f := int64(v) s.Limit = &f } diff --git a/typedapi/types/mappinglimitsettingstotalfields.go b/typedapi/types/mappinglimitsettingstotalfields.go index 50b45a0236..ec3b5d3199 100644 --- a/typedapi/types/mappinglimitsettingstotalfields.go +++ b/typedapi/types/mappinglimitsettingstotalfields.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,15 +31,26 @@ import ( // MappingLimitSettingsTotalFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L424-L432 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L426-L443 type MappingLimitSettingsTotalFields struct { + // IgnoreDynamicBeyondLimit This setting determines what happens when a dynamically mapped field would + // exceed the total fields limit. When set + // to false (the default), the index request of the document that tries to add a + // dynamic field to the mapping will fail + // with the message Limit of total fields [X] has been exceeded. When set to + // true, the index request will not fail. + // Instead, fields that would exceed the limit are not added to the mapping, + // similar to dynamic: false. + // The fields that were not added to the mapping will be added to the _ignored + // field. + IgnoreDynamicBeyondLimit *bool `json:"ignore_dynamic_beyond_limit,omitempty"` // Limit The maximum number of fields in an index. Field and object mappings, as well // as field aliases count towards this limit. // The limit is in place to prevent mappings and searches from becoming too // large. Higher values can lead to performance // degradations and memory issues, especially in clusters with a high load or // few resources. 
- Limit *int `json:"limit,omitempty"` + Limit *int64 `json:"limit,omitempty"` } func (s *MappingLimitSettingsTotalFields) UnmarshalJSON(data []byte) error { @@ -57,19 +68,32 @@ func (s *MappingLimitSettingsTotalFields) UnmarshalJSON(data []byte) error { switch t { - case "limit": + case "ignore_dynamic_beyond_limit": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "IgnoreDynamicBeyondLimit", err) + } + s.IgnoreDynamicBeyondLimit = &value + case bool: + s.IgnoreDynamicBeyondLimit = &v + } - var tmp interface{} + case "limit": + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { return fmt.Errorf("%s | %w", "Limit", err) } s.Limit = &value case float64: - f := int(v) + f := int64(v) s.Limit = &f } diff --git a/typedapi/types/mappingstats.go b/typedapi/types/mappingstats.go index d386cca863..ae5f2e25b6 100644 --- a/typedapi/types/mappingstats.go +++ b/typedapi/types/mappingstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MappingStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L186-L190 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L186-L190 type MappingStats struct { TotalCount int64 `json:"total_count"` TotalEstimatedOverhead ByteSize `json:"total_estimated_overhead,omitempty"` @@ -54,7 +54,7 @@ func (s *MappingStats) UnmarshalJSON(data []byte) error { switch t { case "total_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -74,7 +74,7 @@ func (s *MappingStats) UnmarshalJSON(data []byte) error { } case "total_estimated_overhead_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/masterisstableindicator.go b/typedapi/types/masterisstableindicator.go index 3096daffca..0d0e3ecb3e 100644 --- a/typedapi/types/masterisstableindicator.go +++ b/typedapi/types/masterisstableindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // MasterIsStableIndicator type. 
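Besides the `int64` widening, the regenerated `MappingLimitSettingsTotalFields` gains the new `ignore_dynamic_beyond_limit` option described in the doc comment above. An illustrative round-trip, hedged as a sketch that uses only the two fields visible in the hunk:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	limit := int64(1500)
	ignore := true
	tf := types.MappingLimitSettingsTotalFields{
		IgnoreDynamicBeyondLimit: &ignore,
		Limit:                    &limit,
	}

	out, _ := json.Marshal(tf)
	fmt.Println(string(out)) // {"ignore_dynamic_beyond_limit":true,"limit":1500}

	// Decoding also tolerates the string forms handled by the new switch
	// case above (strconv.ParseBool / strconv.ParseInt).
	var back types.MappingLimitSettingsTotalFields
	if err := json.Unmarshal([]byte(`{"ignore_dynamic_beyond_limit":"true","limit":"1500"}`), &back); err != nil {
		panic(err)
	}
	fmt.Println(*back.IgnoreDynamicBeyondLimit, *back.Limit) // true 1500
}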
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L79-L83 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L79-L83 type MasterIsStableIndicator struct { Details *MasterIsStableIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` diff --git a/typedapi/types/masterisstableindicatorclusterformationnode.go b/typedapi/types/masterisstableindicatorclusterformationnode.go index 0978a5873f..ff3a5de136 100644 --- a/typedapi/types/masterisstableindicatorclusterformationnode.go +++ b/typedapi/types/masterisstableindicatorclusterformationnode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MasterIsStableIndicatorClusterFormationNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L98-L102 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L98-L102 type MasterIsStableIndicatorClusterFormationNode struct { ClusterFormationMessage string `json:"cluster_formation_message"` Name *string `json:"name,omitempty"` diff --git a/typedapi/types/masterisstableindicatordetails.go b/typedapi/types/masterisstableindicatordetails.go index bd0e6a761e..d00cf6951b 100644 --- a/typedapi/types/masterisstableindicatordetails.go +++ b/typedapi/types/masterisstableindicatordetails.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // MasterIsStableIndicatorDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L84-L89 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L84-L89 type MasterIsStableIndicatorDetails struct { ClusterFormation []MasterIsStableIndicatorClusterFormationNode `json:"cluster_formation,omitempty"` CurrentMaster IndicatorNode `json:"current_master"` diff --git a/typedapi/types/masterisstableindicatorexceptionfetchinghistory.go b/typedapi/types/masterisstableindicatorexceptionfetchinghistory.go index acac32265e..21fa979c4f 100644 --- a/typedapi/types/masterisstableindicatorexceptionfetchinghistory.go +++ b/typedapi/types/masterisstableindicatorexceptionfetchinghistory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MasterIsStableIndicatorExceptionFetchingHistory type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L94-L97 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L94-L97 type MasterIsStableIndicatorExceptionFetchingHistory struct { Message string `json:"message"` StackTrace string `json:"stack_trace"` diff --git a/typedapi/types/masterrecord.go b/typedapi/types/masterrecord.go index 70b5034db8..76169bde30 100644 --- a/typedapi/types/masterrecord.go +++ b/typedapi/types/masterrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MasterRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/master/types.ts#L20-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/master/types.ts#L20-L39 type MasterRecord struct { // Host host name Host *string `json:"host,omitempty"` diff --git a/typedapi/types/matchallquery.go b/typedapi/types/matchallquery.go index 12b7611064..1885e31c42 100644 --- a/typedapi/types/matchallquery.go +++ b/typedapi/types/matchallquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MatchAllQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/MatchAllQuery.ts#L22-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/MatchAllQuery.ts#L22-L22 type MatchAllQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -58,7 +58,7 @@ func (s *MatchAllQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/matchboolprefixquery.go b/typedapi/types/matchboolprefixquery.go index 267850b692..6481a8cc66 100644 --- a/typedapi/types/matchboolprefixquery.go +++ b/typedapi/types/matchboolprefixquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // MatchBoolPrefixQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L349-L403 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L349-L403 type MatchBoolPrefixQuery struct { // Analyzer Analyzer used to convert the text in the query value into tokens. Analyzer *string `json:"analyzer,omitempty"` @@ -113,7 +113,7 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { s.Analyzer = &o case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -139,7 +139,7 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { } case "fuzzy_transpositions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -154,7 +154,7 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { case "max_expansions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -180,7 +180,7 @@ func (s *MatchBoolPrefixQuery) UnmarshalJSON(data []byte) error { case "prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/matchedfield.go b/typedapi/types/matchedfield.go index c66e15ee46..410c6ab1e9 100644 --- a/typedapi/types/matchedfield.go +++ b/typedapi/types/matchedfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MatchedField type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/text_structure/test_grok_pattern/types.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/text_structure/test_grok_pattern/types.ts#L23-L27 type MatchedField struct { Length int `json:"length"` Match string `json:"match"` @@ -55,7 +55,7 @@ func (s *MatchedField) UnmarshalJSON(data []byte) error { case "length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *MatchedField) UnmarshalJSON(data []byte) error { case "offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/matchedtext.go b/typedapi/types/matchedtext.go index 403875a519..272f63a301 100644 --- a/typedapi/types/matchedtext.go +++ b/typedapi/types/matchedtext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MatchedText type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/text_structure/test_grok_pattern/types.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/text_structure/test_grok_pattern/types.ts#L29-L32 type MatchedText struct { Fields map[string][]MatchedField `json:"fields,omitempty"` Matched bool `json:"matched"` @@ -61,7 +61,7 @@ func (s *MatchedText) UnmarshalJSON(data []byte) error { } case "matched": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/matchnonequery.go b/typedapi/types/matchnonequery.go index 96c2e34708..3071256b20 100644 --- a/typedapi/types/matchnonequery.go +++ b/typedapi/types/matchnonequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MatchNoneQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/MatchNoneQuery.ts#L22-L22 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/MatchNoneQuery.ts#L22-L22 type MatchNoneQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -58,7 +58,7 @@ func (s *MatchNoneQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/matchonlytextproperty.go b/typedapi/types/matchonlytextproperty.go index 204343d1e8..7b2be4eaf5 100644 --- a/typedapi/types/matchonlytextproperty.go +++ b/typedapi/types/matchonlytextproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MatchOnlyTextProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L216-L241 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L230-L255 type MatchOnlyTextProperty struct { // CopyTo Allows you to copy the values of multiple fields into a group // field, which can then be queried as a single field. 
@@ -83,7 +83,7 @@ func (s *MatchOnlyTextProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -104,7 +104,7 @@ func (s *MatchOnlyTextProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -194,12 +194,6 @@ func (s *MatchOnlyTextProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -218,6 +212,18 @@ func (s *MatchOnlyTextProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -374,6 +380,12 @@ func (s *MatchOnlyTextProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { diff --git a/typedapi/types/matchphraseprefixquery.go b/typedapi/types/matchphraseprefixquery.go index 2b55a56633..f00ac973b9 100644 --- a/typedapi/types/matchphraseprefixquery.go +++ b/typedapi/types/matchphraseprefixquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // MatchPhrasePrefixQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L428-L454 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L428-L454 type MatchPhrasePrefixQuery struct { // Analyzer Analyzer used to convert text in the query value into tokens. 
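The multi-field switch in `MatchOnlyTextProperty.UnmarshalJSON` gains `semantic_text` and `icu_collation_keyword` cases and renames the dynamic placeholder from `{dynamic_property}` to `{dynamic_type}`, so those sub-fields decode into their concrete generated types instead of falling through to the generic default. A hedged sketch of the effect; the sample mapping JSON and the `*types.SemanticTextProperty` assertion are illustrative assumptions, not taken from the diff:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// A match_only_text mapping with a semantic_text multi-field
	// (illustrative payload; inference_id is an assumed example value).
	payload := []byte(`{
		"type": "match_only_text",
		"fields": {
			"inference": { "type": "semantic_text", "inference_id": "my-elser-endpoint" }
		}
	}`)

	var prop types.MatchOnlyTextProperty
	if err := json.Unmarshal(payload, &prop); err != nil {
		panic(err)
	}

	// With the new case, the sub-field arrives as the concrete generated
	// type rather than being handled by the untyped default branch.
	if _, ok := prop.Fields["inference"].(*types.SemanticTextProperty); ok {
		fmt.Println("decoded as *types.SemanticTextProperty")
	}
}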
Analyzer *string `json:"analyzer,omitempty"` @@ -93,7 +93,7 @@ func (s *MatchPhrasePrefixQuery) UnmarshalJSON(data []byte) error { s.Analyzer = &o case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +110,7 @@ func (s *MatchPhrasePrefixQuery) UnmarshalJSON(data []byte) error { case "max_expansions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *MatchPhrasePrefixQuery) UnmarshalJSON(data []byte) error { case "slop": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/matchphrasequery.go b/typedapi/types/matchphrasequery.go index 3e79e8fefd..4d186038cb 100644 --- a/typedapi/types/matchphrasequery.go +++ b/typedapi/types/matchphrasequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // MatchPhraseQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L405-L426 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L405-L426 type MatchPhraseQuery struct { // Analyzer Analyzer used to convert the text in the query value into tokens. Analyzer *string `json:"analyzer,omitempty"` @@ -90,7 +90,7 @@ func (s *MatchPhraseQuery) UnmarshalJSON(data []byte) error { s.Analyzer = &o case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -131,7 +131,7 @@ func (s *MatchPhraseQuery) UnmarshalJSON(data []byte) error { case "slop": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/matchquery.go b/typedapi/types/matchquery.go index 593a3fe813..5653df856d 100644 --- a/typedapi/types/matchquery.go +++ b/typedapi/types/matchquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // MatchQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L282-L347 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L282-L347 type MatchQuery struct { // Analyzer Analyzer used to convert the text in the query value into tokens. 
Analyzer *string `json:"analyzer,omitempty"` @@ -111,7 +111,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { s.Analyzer = &o case "auto_generate_synonyms_phrase_query": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { } case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -141,7 +141,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { } case "cutoff_frequency": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -167,7 +167,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { } case "fuzzy_transpositions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -181,7 +181,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { } case "lenient": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -196,7 +196,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case "max_expansions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -222,7 +222,7 @@ func (s *MatchQuery) UnmarshalJSON(data []byte) error { case "prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/matrixaggregation.go b/typedapi/types/matrixaggregation.go index 21a62bd3c1..469fb416dc 100644 --- a/typedapi/types/matrixaggregation.go +++ b/typedapi/types/matrixaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,20 +26,17 @@ import ( "errors" "fmt" "io" - "strconv" ) // MatrixAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/matrix.ts#L26-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/matrix.ts#L26-L36 type MatrixAggregation struct { // Fields An array of fields for computing the statistics. Fields []string `json:"fields,omitempty"` - Meta Metadata `json:"meta,omitempty"` // Missing The value to apply to documents that do not have a value. // By default, documents without a value are ignored. 
Missing map[string]Float64 `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` } func (s *MatrixAggregation) UnmarshalJSON(data []byte) error { @@ -73,11 +70,6 @@ func (s *MatrixAggregation) UnmarshalJSON(data []byte) error { } } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "missing": if s.Missing == nil { s.Missing = make(map[string]Float64, 0) @@ -86,18 +78,6 @@ func (s *MatrixAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Missing", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/matrixstatsaggregate.go b/typedapi/types/matrixstatsaggregate.go index 69c7ddcd4b..40ad3e8468 100644 --- a/typedapi/types/matrixstatsaggregate.go +++ b/typedapi/types/matrixstatsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MatrixStatsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L757-L761 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L764-L768 type MatrixStatsAggregate struct { DocCount int64 `json:"doc_count"` Fields []MatrixStatsFields `json:"fields,omitempty"` @@ -54,7 +54,7 @@ func (s *MatrixStatsAggregate) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/matrixstatsaggregation.go b/typedapi/types/matrixstatsaggregation.go index 768aeb23b7..c5582e8172 100644 --- a/typedapi/types/matrixstatsaggregation.go +++ b/typedapi/types/matrixstatsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,24 +26,21 @@ import ( "errors" "fmt" "io" - "strconv" "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/sortmode" ) // MatrixStatsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/matrix.ts#L38-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/matrix.ts#L38-L44 type MatrixStatsAggregation struct { // Fields An array of fields for computing the statistics. Fields []string `json:"fields,omitempty"` - Meta Metadata `json:"meta,omitempty"` // Missing The value to apply to documents that do not have a value. // By default, documents without a value are ignored. 
Missing map[string]Float64 `json:"missing,omitempty"` // Mode Array value the aggregation will use for array or multi-valued fields. Mode *sortmode.SortMode `json:"mode,omitempty"` - Name *string `json:"name,omitempty"` } func (s *MatrixStatsAggregation) UnmarshalJSON(data []byte) error { @@ -77,11 +74,6 @@ func (s *MatrixStatsAggregation) UnmarshalJSON(data []byte) error { } } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "missing": if s.Missing == nil { s.Missing = make(map[string]Float64, 0) @@ -95,18 +87,6 @@ func (s *MatrixStatsAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Mode", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/matrixstatsfields.go b/typedapi/types/matrixstatsfields.go index ee2d313a79..bf863adf05 100644 --- a/typedapi/types/matrixstatsfields.go +++ b/typedapi/types/matrixstatsfields.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MatrixStatsFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L763-L772 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L770-L779 type MatrixStatsFields struct { Correlation map[string]Float64 `json:"correlation"` Count int64 `json:"count"` @@ -67,7 +67,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { } case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { } case "kurtosis": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,7 +106,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { } case "mean": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -127,7 +127,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { } case "skewness": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -143,7 +143,7 @@ func (s *MatrixStatsFields) UnmarshalJSON(data []byte) error { } case "variance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/maxaggregate.go b/typedapi/types/maxaggregate.go index ee8f5fc9c4..4844a993de 100644 --- a/typedapi/types/maxaggregate.go +++ b/typedapi/types/maxaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // MaxAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L200-L201 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L200-L201 type MaxAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. - Value Float64 `json:"value,omitempty"` - ValueAsString *string `json:"value_as_string,omitempty"` + Value *Float64 `json:"value,omitempty"` + ValueAsString *string `json:"value_as_string,omitempty"` } func (s *MaxAggregate) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/maxaggregation.go b/typedapi/types/maxaggregation.go index 96dad54e5a..c6919b1094 100644 --- a/typedapi/types/maxaggregation.go +++ b/typedapi/types/maxaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MaxAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L162-L162 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L162-L162 type MaxAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` diff --git a/typedapi/types/maxbucketaggregation.go b/typedapi/types/maxbucketaggregation.go index 4b787e584e..b649db59fa 100644 --- a/typedapi/types/maxbucketaggregation.go +++ b/typedapi/types/maxbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // MaxBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L224-L224 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L224-L224 type MaxBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type MaxBucketAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. 
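`MaxAggregate` (like `MedianAbsoluteDeviationAggregate` below) now reports `Value` as `*Float64` instead of `Float64`, so an aggregation with no data yields a nil pointer rather than an ambiguous zero. A short sketch of the nil check callers now write, assuming `Float64` is this package's float64 alias:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	var max types.MaxAggregate
	if err := json.Unmarshal([]byte(`{"value": 42.5}`), &max); err != nil {
		panic(err)
	}

	// With Value now a pointer, "no data to aggregate" is nil rather than
	// a zero value that could be mistaken for a real maximum of 0.
	if max.Value != nil {
		fmt.Println("max:", float64(*max.Value))
	} else {
		fmt.Println("no data to aggregate")
	}
}

The pointer form also keeps `omitempty` meaningful when re-serializing aggregate responses.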
GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` } func (s *MaxBucketAggregation) UnmarshalJSON(data []byte) error { @@ -84,23 +82,6 @@ func (s *MaxBucketAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/medianabsolutedeviationaggregate.go b/typedapi/types/medianabsolutedeviationaggregate.go index 88c4e47850..094be0621d 100644 --- a/typedapi/types/medianabsolutedeviationaggregate.go +++ b/typedapi/types/medianabsolutedeviationaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // MedianAbsoluteDeviationAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L194-L195 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L194-L195 type MedianAbsoluteDeviationAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. - Value Float64 `json:"value,omitempty"` - ValueAsString *string `json:"value_as_string,omitempty"` + Value *Float64 `json:"value,omitempty"` + ValueAsString *string `json:"value_as_string,omitempty"` } func (s *MedianAbsoluteDeviationAggregate) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/medianabsolutedeviationaggregation.go b/typedapi/types/medianabsolutedeviationaggregation.go index 2379a977e6..f4c11a290f 100644 --- a/typedapi/types/medianabsolutedeviationaggregation.go +++ b/typedapi/types/medianabsolutedeviationaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MedianAbsoluteDeviationAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L164-L170 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L164-L170 type MedianAbsoluteDeviationAggregation struct { // Compression Limits the maximum number of nodes used by the underlying TDigest algorithm // to `20 * compression`, enabling control of memory usage and approximation @@ -62,7 +62,7 @@ func (s *MedianAbsoluteDeviationAggregation) UnmarshalJSON(data []byte) error { switch t { case "compression": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/memmlstats.go b/typedapi/types/memmlstats.go index ccc784fdf3..c5c7cca45e 100644 --- a/typedapi/types/memmlstats.go +++ b/typedapi/types/memmlstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MemMlStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_memory_stats/types.ts#L90-L111 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_memory_stats/types.ts#L90-L111 type MemMlStats struct { // AnomalyDetectors Amount of native memory set aside for anomaly detection jobs. AnomalyDetectors ByteSize `json:"anomaly_detectors,omitempty"` @@ -83,7 +83,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case "anomaly_detectors_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case "data_frame_analytics_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case "max_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -146,7 +146,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case "native_code_overhead_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -167,7 +167,7 @@ func (s *MemMlStats) UnmarshalJSON(data []byte) error { case "native_inference_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/memory.go b/typedapi/types/memory.go index 0f7c4efac8..4d150d04b6 100644 --- a/typedapi/types/memory.go +++ b/typedapi/types/memory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Memory type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_memory_stats/types.ts#L25-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_memory_stats/types.ts#L25-L48 type Memory struct { Attributes map[string]string `json:"attributes"` EphemeralId string `json:"ephemeral_id"` diff --git a/typedapi/types/memorystats.go b/typedapi/types/memorystats.go index bdadd9b223..1ef0a5e669 100644 --- a/typedapi/types/memorystats.go +++ b/typedapi/types/memorystats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MemoryStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L596-L620 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L596-L620 type MemoryStats struct { // AdjustedTotalInBytes If the amount of physical memory has been overridden using the // `es`.`total_memory_bytes` system property then this reports the overridden @@ -68,7 +68,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { switch t { case "adjusted_total_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { } case "free_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +110,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { s.Resident = &o case "resident_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -137,7 +137,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { s.Share = &o case "share_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -152,7 +152,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { } case "total_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -179,7 +179,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { s.TotalVirtual = &o case "total_virtual_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -194,7 +194,7 @@ func (s *MemoryStats) UnmarshalJSON(data []byte) error { } case "used_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/memstats.go b/typedapi/types/memstats.go index 5e2ed6c4ba..fa67a69a81 100644 --- a/typedapi/types/memstats.go +++ b/typedapi/types/memstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MemStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/get_memory_stats/types.ts#L65-L88 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/get_memory_stats/types.ts#L65-L88 type MemStats struct { // AdjustedTotal If the amount of physical memory has been overridden using the // es.total_memory_bytes system property @@ -73,7 +73,7 @@ func (s *MemStats) UnmarshalJSON(data []byte) error { case "adjusted_total_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,7 +99,7 @@ func (s *MemStats) UnmarshalJSON(data []byte) error { case "total_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/merge.go b/typedapi/types/merge.go index 0ba55b3b23..df2890fb6f 100644 --- a/typedapi/types/merge.go +++ b/typedapi/types/merge.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Merge type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L330-L332 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L332-L334 type Merge struct { Scheduler *MergeScheduler `json:"scheduler,omitempty"` } diff --git a/typedapi/types/mergescheduler.go b/typedapi/types/mergescheduler.go index 394ceadb2b..570af048c0 100644 --- a/typedapi/types/mergescheduler.go +++ b/typedapi/types/mergescheduler.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MergeScheduler type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L334-L337 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L336-L339 type MergeScheduler struct { MaxMergeCount Stringifiedinteger `json:"max_merge_count,omitempty"` MaxThreadCount Stringifiedinteger `json:"max_thread_count,omitempty"` diff --git a/typedapi/types/mergesstats.go b/typedapi/types/mergesstats.go index 7c5b8d65e2..1a7a888058 100644 --- a/typedapi/types/mergesstats.go +++ b/typedapi/types/mergesstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MergesStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L161-L178 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L161-L178 type MergesStats struct { Current int64 `json:"current"` CurrentDocs int64 `json:"current_docs"` @@ -67,7 +67,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { switch t { case "current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { } case "current_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -109,7 +109,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { s.CurrentSize = &o case "current_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -124,7 +124,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -151,7 +151,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { s.TotalAutoThrottle = &o case "total_auto_throttle_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -166,7 +166,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { } case "total_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -193,7 +193,7 @@ func (s *MergesStats) UnmarshalJSON(data []byte) error { s.TotalSize = &o case "total_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/metadata.go b/typedapi/types/metadata.go index f98cc6b25d..217586b9df 100644 --- a/typedapi/types/metadata.go +++ b/typedapi/types/metadata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ import ( // Metadata type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L99-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L99-L99 type Metadata map[string]json.RawMessage diff --git a/typedapi/types/metrics.go b/typedapi/types/metrics.go index 3d505c18b1..91833b2b1f 100644 --- a/typedapi/types/metrics.go +++ b/typedapi/types/metrics.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Metrics type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L76-L76 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L76-L76 type Metrics []string diff --git a/typedapi/types/mgetoperation.go b/typedapi/types/mgetoperation.go index dbdad04945..0fbabc7bd7 100644 --- a/typedapi/types/mgetoperation.go +++ b/typedapi/types/mgetoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // MgetOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/mget/types.ts#L32-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/mget/types.ts#L32-L55 type MgetOperation struct { // Id_ The unique document ID. Id_ string `json:"_id"` diff --git a/typedapi/types/mgetresponseitem.go b/typedapi/types/mgetresponseitem.go index 3ab3030b78..611116a133 100644 --- a/typedapi/types/mgetresponseitem.go +++ b/typedapi/types/mgetresponseitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // GetResult // MultiGetError // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/mget/types.ts#L57-L60 -type MgetResponseItem interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/mget/types.ts#L57-L60 +type MgetResponseItem any diff --git a/typedapi/types/migrationfeatureindexinfo.go b/typedapi/types/migrationfeatureindexinfo.go index 52f3876a9a..c00af76314 100644 --- a/typedapi/types/migrationfeatureindexinfo.go +++ b/typedapi/types/migrationfeatureindexinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MigrationFeatureIndexInfo type. 
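The union-style aliases in this package (`MgetResponseItem` here, and `MinimumShouldMatch`, `Missing`, `MsearchRequestItem` further down) keep their open shape; only the declaration changes from `interface{}` to `any`, and callers still narrow the value to one of the variants listed in the type comment. A hedged sketch of that narrowing, using local stand-in types rather than the real typedapi variants:

    package main

    import "fmt"

    // responseItem mirrors a declaration like `type MgetResponseItem any`.
    type responseItem any

    // docResult and docError are illustrative variants only.
    type docResult struct{ ID string }
    type docError struct{ Reason string }

    func describe(item responseItem) string {
        switch v := item.(type) {
        case docResult:
            return "found " + v.ID
        case docError:
            return "error: " + v.Reason
        default:
            return fmt.Sprintf("unexpected variant %T", v)
        }
    }

    func main() {
        fmt.Println(describe(docResult{ID: "1"}))
        fmt.Println(describe(docError{Reason: "not_found"}))
    }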
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L44-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/migration/get_feature_upgrade_status/GetFeatureUpgradeStatusResponse.ts#L44-L48 type MigrationFeatureIndexInfo struct { FailureCause *ErrorCause `json:"failure_cause,omitempty"` Index string `json:"index"` diff --git a/typedapi/types/minaggregate.go b/typedapi/types/minaggregate.go index 1bb3b6bc76..a866e244a1 100644 --- a/typedapi/types/minaggregate.go +++ b/typedapi/types/minaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // MinAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L197-L198 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L197-L198 type MinAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. - Value Float64 `json:"value,omitempty"` - ValueAsString *string `json:"value_as_string,omitempty"` + Value *Float64 `json:"value,omitempty"` + ValueAsString *string `json:"value_as_string,omitempty"` } func (s *MinAggregate) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/minaggregation.go b/typedapi/types/minaggregation.go index b34a3d1b1a..4e9a460e69 100644 --- a/typedapi/types/minaggregation.go +++ b/typedapi/types/minaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MinAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L172-L172 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L172-L172 type MinAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` diff --git a/typedapi/types/minbucketaggregation.go b/typedapi/types/minbucketaggregation.go index 182f66f1b4..b46d1c57c3 100644 --- a/typedapi/types/minbucketaggregation.go +++ b/typedapi/types/minbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // MinBucketAggregation type. 
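`MinAggregate.Value` changing from `Float64` to `*Float64` is a behavioural change for callers: a missing metric (no data to aggregate, as the field comment says) now surfaces as a nil pointer instead of a zero value, so reads should nil-check before dereferencing. A small sketch under that assumption, with `minAgg` as a hypothetical stand-in for the decoded aggregate:

    package main

    import "fmt"

    type minAgg struct {
        Value         *float64 // nil when there was no data to aggregate
        ValueAsString *string
    }

    func main() {
        var withData, empty minAgg
        v := 3.5
        withData.Value = &v

        for _, agg := range []minAgg{withData, empty} {
            if agg.Value != nil {
                fmt.Println("min:", *agg.Value)
            } else {
                fmt.Println("min: no data in the matching buckets")
            }
        }
    }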
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L226-L226 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L226-L226 type MinBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type MinBucketAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` } func (s *MinBucketAggregation) UnmarshalJSON(data []byte) error { @@ -84,23 +82,6 @@ func (s *MinBucketAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/minimallicenseinformation.go b/typedapi/types/minimallicenseinformation.go index 9dbde65296..796fa8caf4 100644 --- a/typedapi/types/minimallicenseinformation.go +++ b/typedapi/types/minimallicenseinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // MinimalLicenseInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/info/types.ts#L34-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/info/types.ts#L34-L40 type MinimalLicenseInformation struct { ExpiryDateInMillis int64 `json:"expiry_date_in_millis"` Mode licensetype.LicenseType `json:"mode"` diff --git a/typedapi/types/minimumshouldmatch.go b/typedapi/types/minimumshouldmatch.go index 7c88279861..3a74073cfb 100644 --- a/typedapi/types/minimumshouldmatch.go +++ b/typedapi/types/minimumshouldmatch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // int // string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L163-L167 -type MinimumShouldMatch interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L163-L167 +type MinimumShouldMatch any diff --git a/typedapi/types/missing.go b/typedapi/types/missing.go index 528d49a8d9..b5ea5bc52a 100644 --- a/typedapi/types/missing.go +++ b/typedapi/types/missing.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -27,5 +27,5 @@ package types // Float64 // bool // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/AggregationContainer.ts#L517-L517 -type Missing interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/AggregationContainer.ts#L516-L516 +type Missing any diff --git a/typedapi/types/missingaggregate.go b/typedapi/types/missingaggregate.go index 4fa51917db..81155ceee2 100644 --- a/typedapi/types/missingaggregate.go +++ b/typedapi/types/missingaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // MissingAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L483-L484 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L487-L488 type MissingAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *MissingAggregate) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *MissingAggregate) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *MissingAggregate) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *MissingAggregate) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *MissingAggregate) UnmarshalJSON(data []byte) error { func (s MissingAggregate) MarshalJSON() ([]byte, error) { type opt MissingAggregate // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/missingaggregation.go b/typedapi/types/missingaggregation.go index beba191598..052e185cb7 100644 --- a/typedapi/types/missingaggregation.go +++ b/typedapi/types/missingaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,18 +26,15 @@ import ( "errors" "fmt" "io" - "strconv" ) // MissingAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L574-L580 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L576-L582 type MissingAggregation struct { // Field The name of the field. 
- Field *string `json:"field,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Missing Missing `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` + Field *string `json:"field,omitempty"` + Missing Missing `json:"missing,omitempty"` } func (s *MissingAggregation) UnmarshalJSON(data []byte) error { @@ -60,28 +57,11 @@ func (s *MissingAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Field", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "missing": if err := dec.Decode(&s.Missing); err != nil { return fmt.Errorf("%s | %w", "Missing", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/mlcounter.go b/typedapi/types/mlcounter.go index acf6e361cf..84abf9e58f 100644 --- a/typedapi/types/mlcounter.go +++ b/typedapi/types/mlcounter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MlCounter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L255-L257 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L255-L257 type MlCounter struct { Count int64 `json:"count"` } @@ -52,7 +52,7 @@ func (s *MlCounter) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/mldatafeed.go b/typedapi/types/mldatafeed.go index 40436cc989..138044bca4 100644 --- a/typedapi/types/mldatafeed.go +++ b/typedapi/types/mldatafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MLDatafeed type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Datafeed.ts#L37-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Datafeed.ts#L36-L57 type MLDatafeed struct { Aggregations map[string]Aggregations `json:"aggregations,omitempty"` // Authorization The security privileges that the datafeed uses to run its queries. 
If Elastic @@ -124,7 +124,7 @@ func (s *MLDatafeed) UnmarshalJSON(data []byte) error { case "max_empty_searches": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -163,7 +163,7 @@ func (s *MLDatafeed) UnmarshalJSON(data []byte) error { case "scroll_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/mldataframeanalyticsjobs.go b/typedapi/types/mldataframeanalyticsjobs.go index a5b72d10f9..115f01c499 100644 --- a/typedapi/types/mldataframeanalyticsjobs.go +++ b/typedapi/types/mldataframeanalyticsjobs.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // MlDataFrameAnalyticsJobs type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L177-L182 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L177-L182 type MlDataFrameAnalyticsJobs struct { All_ MlDataFrameAnalyticsJobsCount `json:"_all"` AnalysisCounts *MlDataFrameAnalyticsJobsAnalysis `json:"analysis_counts,omitempty"` diff --git a/typedapi/types/mldataframeanalyticsjobsanalysis.go b/typedapi/types/mldataframeanalyticsjobsanalysis.go index 67c2821fe5..9390f1cd36 100644 --- a/typedapi/types/mldataframeanalyticsjobsanalysis.go +++ b/typedapi/types/mldataframeanalyticsjobsanalysis.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MlDataFrameAnalyticsJobsAnalysis type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L184-L188 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L184-L188 type MlDataFrameAnalyticsJobsAnalysis struct { Classification *int `json:"classification,omitempty"` OutlierDetection *int `json:"outlier_detection,omitempty"` @@ -55,7 +55,7 @@ func (s *MlDataFrameAnalyticsJobsAnalysis) UnmarshalJSON(data []byte) error { case "classification": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *MlDataFrameAnalyticsJobsAnalysis) UnmarshalJSON(data []byte) error { case "outlier_detection": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *MlDataFrameAnalyticsJobsAnalysis) UnmarshalJSON(data []byte) error { case "regression": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/mldataframeanalyticsjobscount.go b/typedapi/types/mldataframeanalyticsjobscount.go index 0b183cdfea..2fceda3161 100644 --- a/typedapi/types/mldataframeanalyticsjobscount.go +++ b/typedapi/types/mldataframeanalyticsjobscount.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MlDataFrameAnalyticsJobsCount type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L194-L196 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L194-L196 type MlDataFrameAnalyticsJobsCount struct { Count int64 `json:"count"` } @@ -52,7 +52,7 @@ func (s *MlDataFrameAnalyticsJobsCount) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/mldataframeanalyticsjobsmemory.go b/typedapi/types/mldataframeanalyticsjobsmemory.go index c053f6d3df..8f7007213c 100644 --- a/typedapi/types/mldataframeanalyticsjobsmemory.go +++ b/typedapi/types/mldataframeanalyticsjobsmemory.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // MlDataFrameAnalyticsJobsMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L190-L192 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L190-L192 type MlDataFrameAnalyticsJobsMemory struct { PeakUsageBytes JobStatistics `json:"peak_usage_bytes"` } diff --git a/typedapi/types/mlfilter.go b/typedapi/types/mlfilter.go index 5b8bfdf4d6..48e13ffd2c 100644 --- a/typedapi/types/mlfilter.go +++ b/typedapi/types/mlfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MLFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Filter.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Filter.ts#L22-L29 type MLFilter struct { // Description A description of the filter. Description *string `json:"description,omitempty"` diff --git a/typedapi/types/mlinference.go b/typedapi/types/mlinference.go index 2b86b51f50..a6365096d6 100644 --- a/typedapi/types/mlinference.go +++ b/typedapi/types/mlinference.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // MlInference type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L198-L206 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L198-L206 type MlInference struct { Deployments *MlInferenceDeployments `json:"deployments,omitempty"` IngestProcessors map[string]MlInferenceIngestProcessor `json:"ingest_processors"` diff --git a/typedapi/types/mlinferencedeployments.go b/typedapi/types/mlinferencedeployments.go index 93d05ef0f1..b35b20fdca 100644 --- a/typedapi/types/mlinferencedeployments.go +++ b/typedapi/types/mlinferencedeployments.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MlInferenceDeployments type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L227-L232 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L227-L232 type MlInferenceDeployments struct { Count int `json:"count"` InferenceCounts JobStatistics `json:"inference_counts"` @@ -56,7 +56,7 @@ func (s *MlInferenceDeployments) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/mlinferencedeploymentstimems.go b/typedapi/types/mlinferencedeploymentstimems.go index 3138d1dcab..1da54903ad 100644 --- a/typedapi/types/mlinferencedeploymentstimems.go +++ b/typedapi/types/mlinferencedeploymentstimems.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MlInferenceDeploymentsTimeMs type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L234-L236 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L234-L236 type MlInferenceDeploymentsTimeMs struct { Avg Float64 `json:"avg"` } @@ -52,7 +52,7 @@ func (s *MlInferenceDeploymentsTimeMs) UnmarshalJSON(data []byte) error { switch t { case "avg": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/mlinferenceingestprocessor.go b/typedapi/types/mlinferenceingestprocessor.go index 00a2f90da0..dbcc647398 100644 --- a/typedapi/types/mlinferenceingestprocessor.go +++ b/typedapi/types/mlinferenceingestprocessor.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // MlInferenceIngestProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L208-L213 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L208-L213 type MlInferenceIngestProcessor struct { NumDocsProcessed MlInferenceIngestProcessorCount `json:"num_docs_processed"` NumFailures MlInferenceIngestProcessorCount `json:"num_failures"` diff --git a/typedapi/types/mlinferenceingestprocessorcount.go b/typedapi/types/mlinferenceingestprocessorcount.go index e49a8aa7ed..717fdf9e74 100644 --- a/typedapi/types/mlinferenceingestprocessorcount.go +++ b/typedapi/types/mlinferenceingestprocessorcount.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MlInferenceIngestProcessorCount type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L238-L242 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L238-L242 type MlInferenceIngestProcessorCount struct { Max int64 `json:"max"` Min int64 `json:"min"` @@ -54,7 +54,7 @@ func (s *MlInferenceIngestProcessorCount) UnmarshalJSON(data []byte) error { switch t { case "max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -69,7 +69,7 @@ func (s *MlInferenceIngestProcessorCount) UnmarshalJSON(data []byte) error { } case "min": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,7 +84,7 @@ func (s *MlInferenceIngestProcessorCount) UnmarshalJSON(data []byte) error { } case "sum": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/mlinferencetrainedmodels.go b/typedapi/types/mlinferencetrainedmodels.go index d6c3b209c9..7357364734 100644 --- a/typedapi/types/mlinferencetrainedmodels.go +++ b/typedapi/types/mlinferencetrainedmodels.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // MlInferenceTrainedModels type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L215-L225 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L215-L225 type MlInferenceTrainedModels struct { All_ MlCounter `json:"_all"` Count *MlInferenceTrainedModelsCount `json:"count,omitempty"` diff --git a/typedapi/types/mlinferencetrainedmodelscount.go b/typedapi/types/mlinferencetrainedmodelscount.go index 67524e5b75..d73ee30994 100644 --- a/typedapi/types/mlinferencetrainedmodelscount.go +++ b/typedapi/types/mlinferencetrainedmodelscount.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MlInferenceTrainedModelsCount type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L244-L253 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L244-L253 type MlInferenceTrainedModelsCount struct { Classification *int64 `json:"classification,omitempty"` Ner *int64 `json:"ner,omitempty"` @@ -59,7 +59,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { switch t { case "classification": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -74,7 +74,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { } case "ner": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { } case "other": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { } case "pass_through": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -119,7 +119,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { } case "prepackaged": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -134,7 +134,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { } case "regression": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -149,7 +149,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { } case "text_embedding": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -164,7 +164,7 @@ func (s *MlInferenceTrainedModelsCount) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/mljobforecasts.go b/typedapi/types/mljobforecasts.go index 17448f2b00..8925db2c10 100644 --- a/typedapi/types/mljobforecasts.go +++ b/typedapi/types/mljobforecasts.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MlJobForecasts type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L172-L175 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L172-L175 type MlJobForecasts struct { ForecastedJobs int64 `json:"forecasted_jobs"` Total int64 `json:"total"` @@ -53,7 +53,7 @@ func (s *MlJobForecasts) UnmarshalJSON(data []byte) error { switch t { case "forecasted_jobs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *MlJobForecasts) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/modelplotconfig.go b/typedapi/types/modelplotconfig.go index ae5532c523..5c2f7ba9e6 100644 --- a/typedapi/types/modelplotconfig.go +++ b/typedapi/types/modelplotconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ModelPlotConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/ModelPlot.ts#L23-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/ModelPlot.ts#L23-L42 type ModelPlotConfig struct { // AnnotationsEnabled If true, enables calculation and storage of the model change annotations for // each entity that is being analyzed. @@ -62,7 +62,7 @@ func (s *ModelPlotConfig) UnmarshalJSON(data []byte) error { switch t { case "annotations_enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *ModelPlotConfig) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/modelsizestats.go b/typedapi/types/modelsizestats.go index 5a46b6567a..97554e5e70 100644 --- a/typedapi/types/modelsizestats.go +++ b/typedapi/types/modelsizestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // ModelSizeStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Model.ts#L59-L81 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Model.ts#L59-L81 type ModelSizeStats struct { AssignmentMemoryBasis *string `json:"assignment_memory_basis,omitempty"` BucketAllocationFailuresCount int64 `json:"bucket_allocation_failures_count"` @@ -87,7 +87,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { s.AssignmentMemoryBasis = &o case "bucket_allocation_failures_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +108,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case "categorized_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -124,7 +124,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case "dead_category_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -140,7 +140,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case "failed_category_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -156,7 +156,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case "frequent_category_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -207,7 +207,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case "rare_category_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -234,7 +234,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { s.ResultType = o case "timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -249,7 +249,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { } case "total_by_field_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -265,7 +265,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { case "total_category_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -280,7 +280,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { } case "total_over_field_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -295,7 +295,7 @@ func (s *ModelSizeStats) UnmarshalJSON(data []byte) error { } case "total_partition_field_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/modelsnapshot.go b/typedapi/types/modelsnapshot.go index fc70f31e7b..5b7f7e63cf 100644 --- a/typedapi/types/modelsnapshot.go +++ b/typedapi/types/modelsnapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ModelSnapshot type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Model.ts#L25-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Model.ts#L25-L46 type ModelSnapshot struct { // Description An optional description of the job. Description *string `json:"description,omitempty"` @@ -92,7 +92,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { case "latest_record_time_stamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +108,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { case "latest_result_time_stamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -133,7 +133,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { } case "retain": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -147,7 +147,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { } case "snapshot_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -167,7 +167,7 @@ func (s *ModelSnapshot) UnmarshalJSON(data []byte) error { } case "timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/modelsnapshotupgrade.go b/typedapi/types/modelsnapshotupgrade.go index cf79510ec9..76ea8972bd 100644 --- a/typedapi/types/modelsnapshotupgrade.go +++ b/typedapi/types/modelsnapshotupgrade.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // ModelSnapshotUpgrade type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Model.ts#L48-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Model.ts#L48-L57 type ModelSnapshotUpgrade struct { AssignmentExplanation string `json:"assignment_explanation"` JobId string `json:"job_id"` diff --git a/typedapi/types/monitoring.go b/typedapi/types/monitoring.go index 7c303841b9..0af3c98937 100644 --- a/typedapi/types/monitoring.go +++ b/typedapi/types/monitoring.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Monitoring type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L381-L384 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L381-L384 type Monitoring struct { Available bool `json:"available"` CollectionEnabled bool `json:"collection_enabled"` @@ -55,7 +55,7 @@ func (s *Monitoring) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -69,7 +69,7 @@ func (s *Monitoring) UnmarshalJSON(data []byte) error { } case "collection_enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *Monitoring) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/morelikethisquery.go b/typedapi/types/morelikethisquery.go index 9931115aab..fe62ecbdea 100644 --- a/typedapi/types/morelikethisquery.go +++ b/typedapi/types/morelikethisquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // MoreLikeThisQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L78-L163 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L87-L169 type MoreLikeThisQuery struct { // Analyzer The analyzer that is used to analyze the free form text. // Defaults to the analyzer associated with the first field in fields. @@ -80,10 +80,8 @@ type MoreLikeThisQuery struct { // MinimumShouldMatch After the disjunctive query has been formed, this parameter controls the // number of terms that must match. MinimumShouldMatch MinimumShouldMatch `json:"minimum_should_match,omitempty"` - // PerFieldAnalyzer Overrides the default analyzer. - PerFieldAnalyzer map[string]string `json:"per_field_analyzer,omitempty"` - QueryName_ *string `json:"_name,omitempty"` - Routing *string `json:"routing,omitempty"` + QueryName_ *string `json:"_name,omitempty"` + Routing *string `json:"routing,omitempty"` // StopWords An array of stop words. // Any word in this set is ignored. 
StopWords []string `json:"stop_words,omitempty"` @@ -122,7 +120,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { s.Analyzer = &o case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +136,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { } case "boost_terms": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -154,7 +152,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { } case "fail_on_unsupported_field": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -173,7 +171,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { } case "include": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -204,7 +202,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case "max_doc_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -220,7 +218,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case "max_query_terms": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -236,7 +234,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case "max_word_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -252,7 +250,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case "min_doc_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -268,7 +266,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case "min_term_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -284,7 +282,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { case "min_word_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -303,14 +301,6 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "MinimumShouldMatch", err) } - case "per_field_analyzer": - if s.PerFieldAnalyzer == nil { - s.PerFieldAnalyzer = make(map[string]string, 0) - } - if err := dec.Decode(&s.PerFieldAnalyzer); err != nil { - return fmt.Errorf("%s | %w", "PerFieldAnalyzer", err) - } - case "_name": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { @@ -377,9 +367,7 @@ func (s *MoreLikeThisQuery) UnmarshalJSON(data []byte) error { // NewMoreLikeThisQuery returns a MoreLikeThisQuery. func NewMoreLikeThisQuery() *MoreLikeThisQuery { - r := &MoreLikeThisQuery{ - PerFieldAnalyzer: make(map[string]string, 0), - } + r := &MoreLikeThisQuery{} return r } diff --git a/typedapi/types/mountedsnapshot.go b/typedapi/types/mountedsnapshot.go index 184dd7756f..6d8b31d29d 100644 --- a/typedapi/types/mountedsnapshot.go +++ b/typedapi/types/mountedsnapshot.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MountedSnapshot type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/searchable_snapshots/mount/types.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/searchable_snapshots/mount/types.ts#L23-L27 type MountedSnapshot struct { Indices []string `json:"indices"` Shards ShardStatistics `json:"shards"` diff --git a/typedapi/types/movingaverageaggregation.go b/typedapi/types/movingaverageaggregation.go index a94cbda380..ec5e4d6924 100644 --- a/typedapi/types/movingaverageaggregation.go +++ b/typedapi/types/movingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -28,5 +28,5 @@ package types // HoltMovingAverageAggregation // HoltWintersMovingAverageAggregation // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L228-L234 -type MovingAverageAggregation interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L228-L234 +type MovingAverageAggregation any diff --git a/typedapi/types/movingfunctionaggregation.go b/typedapi/types/movingfunctionaggregation.go index c5a7dc8c32..3ffcbc4640 100644 --- a/typedapi/types/movingfunctionaggregation.go +++ b/typedapi/types/movingfunctionaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // MovingFunctionAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L288-L303 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L288-L303 type MovingFunctionAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type MovingFunctionAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // Script The script that should be executed on each window of data. 
Script *string `json:"script,omitempty"` // Shift By default, the window consists of the last n values excluding the current @@ -93,23 +91,6 @@ func (s *MovingFunctionAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "script": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { @@ -124,7 +105,7 @@ func (s *MovingFunctionAggregation) UnmarshalJSON(data []byte) error { case "shift": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -140,7 +121,7 @@ func (s *MovingFunctionAggregation) UnmarshalJSON(data []byte) error { case "window": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/movingpercentilesaggregation.go b/typedapi/types/movingpercentilesaggregation.go index 7fd54d525b..e9dc88216b 100644 --- a/typedapi/types/movingpercentilesaggregation.go +++ b/typedapi/types/movingpercentilesaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // MovingPercentilesAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L305-L317 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L305-L317 type MovingPercentilesAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -44,8 +44,6 @@ type MovingPercentilesAggregation struct { // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` Keyed *bool `json:"keyed,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // Shift By default, the window consists of the last n values excluding the current // bucket. 
// Increasing `shift` by 1, moves the starting window position by 1 to the @@ -93,7 +91,7 @@ func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { } case "keyed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,26 +104,9 @@ func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { s.Keyed = &v } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "shift": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -141,7 +122,7 @@ func (s *MovingPercentilesAggregation) UnmarshalJSON(data []byte) error { case "window": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/msearchrequestitem.go b/typedapi/types/msearchrequestitem.go index 4bfcd03566..47f9e5533a 100644 --- a/typedapi/types/msearchrequestitem.go +++ b/typedapi/types/msearchrequestitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // MultisearchHeader // MultisearchBody // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/msearch/types.ts#L48-L51 -type MsearchRequestItem interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/msearch/types.ts#L47-L50 +type MsearchRequestItem any diff --git a/typedapi/types/msearchresponseitem.go b/typedapi/types/msearchresponseitem.go index 6520dde64f..c8c269bd27 100644 --- a/typedapi/types/msearchresponseitem.go +++ b/typedapi/types/msearchresponseitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // MultiSearchItem // ErrorResponseBase // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/msearch/types.ts#L209-L212 -type MsearchResponseItem interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/msearch/types.ts#L208-L211 +type MsearchResponseItem any diff --git a/typedapi/types/mtermvectorsoperation.go b/typedapi/types/mtermvectorsoperation.go index 1f198dd88f..ed4b8ea259 100644 --- a/typedapi/types/mtermvectorsoperation.go +++ b/typedapi/types/mtermvectorsoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
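The hunks above swap `interface{}` for its `any` alias throughout the generated unmarshalers, which all share one lenient-decoding idiom: a field that may arrive either as its native JSON type or as a quoted string is first decoded into `any` and then resolved with a type switch. A minimal standalone sketch of that idiom follows; the helper name and sample payloads are illustrative and not part of the client.

package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// decodeFlexibleBool mirrors the idiom used by the generated UnmarshalJSON
// methods: decode into `any`, then resolve native booleans and
// string-encoded booleans in a type switch.
func decodeFlexibleBool(raw json.RawMessage) (*bool, error) {
	var tmp any
	if err := json.Unmarshal(raw, &tmp); err != nil {
		return nil, err
	}
	switch v := tmp.(type) {
	case string:
		parsed, err := strconv.ParseBool(v)
		if err != nil {
			return nil, fmt.Errorf("parse bool: %w", err)
		}
		return &parsed, nil
	case bool:
		return &v, nil
	default:
		return nil, fmt.Errorf("unexpected type %T", tmp)
	}
}

func main() {
	// Both a native boolean and a quoted boolean decode to the same value.
	for _, in := range []string{`true`, `"false"`} {
		b, err := decodeFlexibleBool(json.RawMessage(in))
		if err != nil {
			panic(err)
		}
		fmt.Printf("%s -> %v\n", in, *b)
	}
}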
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // MTermVectorsOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/mtermvectors/types.ts#L35-L94 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/mtermvectors/types.ts#L35-L94 type MTermVectorsOperation struct { // Doc An artificial document (a document not present in the index) for which you // want to retrieve term vectors. @@ -49,7 +49,7 @@ type MTermVectorsOperation struct { // Filter Filter terms based on their tf-idf scores. Filter *TermVectorsFilter `json:"filter,omitempty"` // Id_ The ID of the document. - Id_ string `json:"_id"` + Id_ *string `json:"_id,omitempty"` // Index_ The index of the document. Index_ *string `json:"_index,omitempty"` // Offsets If `true`, the response includes term offsets. @@ -89,7 +89,7 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { } case "field_statistics": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -134,7 +134,7 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { } case "offsets": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -148,7 +148,7 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { } case "payloads": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -162,7 +162,7 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { } case "positions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -181,7 +181,7 @@ func (s *MTermVectorsOperation) UnmarshalJSON(data []byte) error { } case "term_statistics": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/multibucketaggregatebaseadjacencymatrixbucket.go b/typedapi/types/multibucketaggregatebaseadjacencymatrixbucket.go index 71258f2c49..69a7c473e7 100644 --- a/typedapi/types/multibucketaggregatebaseadjacencymatrixbucket.go +++ b/typedapi/types/multibucketaggregatebaseadjacencymatrixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseAdjacencyMatrixBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseAdjacencyMatrixBucket struct { Buckets BucketsAdjacencyMatrixBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasecompositebucket.go b/typedapi/types/multibucketaggregatebasecompositebucket.go index b3cd0f7186..3f2a436d99 100644 --- a/typedapi/types/multibucketaggregatebasecompositebucket.go +++ b/typedapi/types/multibucketaggregatebasecompositebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseCompositeBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseCompositeBucket struct { Buckets BucketsCompositeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasedatehistogrambucket.go b/typedapi/types/multibucketaggregatebasedatehistogrambucket.go index 1d8d0e5cec..6668de7a6f 100644 --- a/typedapi/types/multibucketaggregatebasedatehistogrambucket.go +++ b/typedapi/types/multibucketaggregatebasedatehistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseDateHistogramBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseDateHistogramBucket struct { Buckets BucketsDateHistogramBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasedoubletermsbucket.go b/typedapi/types/multibucketaggregatebasedoubletermsbucket.go index a9bcc16db8..5d866b19c3 100644 --- a/typedapi/types/multibucketaggregatebasedoubletermsbucket.go +++ b/typedapi/types/multibucketaggregatebasedoubletermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseDoubleTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseDoubleTermsBucket struct { Buckets BucketsDoubleTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasefiltersbucket.go b/typedapi/types/multibucketaggregatebasefiltersbucket.go index ad56fb398d..1cc70cd30e 100644 --- a/typedapi/types/multibucketaggregatebasefiltersbucket.go +++ b/typedapi/types/multibucketaggregatebasefiltersbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseFiltersBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseFiltersBucket struct { Buckets BucketsFiltersBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasefrequentitemsetsbucket.go b/typedapi/types/multibucketaggregatebasefrequentitemsetsbucket.go index bf22c11cbf..c4e3098978 100644 --- a/typedapi/types/multibucketaggregatebasefrequentitemsetsbucket.go +++ b/typedapi/types/multibucketaggregatebasefrequentitemsetsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseFrequentItemSetsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseFrequentItemSetsBucket struct { Buckets BucketsFrequentItemSetsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasegeohashgridbucket.go b/typedapi/types/multibucketaggregatebasegeohashgridbucket.go index d77f347e13..d46e4d041c 100644 --- a/typedapi/types/multibucketaggregatebasegeohashgridbucket.go +++ b/typedapi/types/multibucketaggregatebasegeohashgridbucket.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseGeoHashGridBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseGeoHashGridBucket struct { Buckets BucketsGeoHashGridBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasegeohexgridbucket.go b/typedapi/types/multibucketaggregatebasegeohexgridbucket.go index 094326f85b..4300dec506 100644 --- a/typedapi/types/multibucketaggregatebasegeohexgridbucket.go +++ b/typedapi/types/multibucketaggregatebasegeohexgridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseGeoHexGridBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseGeoHexGridBucket struct { Buckets BucketsGeoHexGridBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasegeotilegridbucket.go b/typedapi/types/multibucketaggregatebasegeotilegridbucket.go index 700faca306..d1abbf0f49 100644 --- a/typedapi/types/multibucketaggregatebasegeotilegridbucket.go +++ b/typedapi/types/multibucketaggregatebasegeotilegridbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseGeoTileGridBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseGeoTileGridBucket struct { Buckets BucketsGeoTileGridBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasehistogrambucket.go b/typedapi/types/multibucketaggregatebasehistogrambucket.go index 7090e3ac91..ec62e99e8b 100644 --- a/typedapi/types/multibucketaggregatebasehistogrambucket.go +++ b/typedapi/types/multibucketaggregatebasehistogrambucket.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseHistogramBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseHistogramBucket struct { Buckets BucketsHistogramBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebaseipprefixbucket.go b/typedapi/types/multibucketaggregatebaseipprefixbucket.go index 056d83769a..310ab15839 100644 --- a/typedapi/types/multibucketaggregatebaseipprefixbucket.go +++ b/typedapi/types/multibucketaggregatebaseipprefixbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseIpPrefixBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseIpPrefixBucket struct { Buckets BucketsIpPrefixBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebaseiprangebucket.go b/typedapi/types/multibucketaggregatebaseiprangebucket.go index 5b59b44927..105d0f09c9 100644 --- a/typedapi/types/multibucketaggregatebaseiprangebucket.go +++ b/typedapi/types/multibucketaggregatebaseiprangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseIpRangeBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseIpRangeBucket struct { Buckets BucketsIpRangeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebaselongraretermsbucket.go b/typedapi/types/multibucketaggregatebaselongraretermsbucket.go index 937cc88e92..3d9e2c43eb 100644 --- a/typedapi/types/multibucketaggregatebaselongraretermsbucket.go +++ b/typedapi/types/multibucketaggregatebaselongraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseLongRareTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseLongRareTermsBucket struct { Buckets BucketsLongRareTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebaselongtermsbucket.go b/typedapi/types/multibucketaggregatebaselongtermsbucket.go index c42ae66978..b3ef43b6fc 100644 --- a/typedapi/types/multibucketaggregatebaselongtermsbucket.go +++ b/typedapi/types/multibucketaggregatebaselongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseLongTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseLongTermsBucket struct { Buckets BucketsLongTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasemultitermsbucket.go b/typedapi/types/multibucketaggregatebasemultitermsbucket.go index 71069b5137..384f15544e 100644 --- a/typedapi/types/multibucketaggregatebasemultitermsbucket.go +++ b/typedapi/types/multibucketaggregatebasemultitermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseMultiTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseMultiTermsBucket struct { Buckets BucketsMultiTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebaserangebucket.go b/typedapi/types/multibucketaggregatebaserangebucket.go index 97d968e6d0..74a7b5e181 100644 --- a/typedapi/types/multibucketaggregatebaserangebucket.go +++ b/typedapi/types/multibucketaggregatebaserangebucket.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseRangeBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseRangeBucket struct { Buckets BucketsRangeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasesignificantlongtermsbucket.go b/typedapi/types/multibucketaggregatebasesignificantlongtermsbucket.go index 0f9aadf227..bde8ac92a2 100644 --- a/typedapi/types/multibucketaggregatebasesignificantlongtermsbucket.go +++ b/typedapi/types/multibucketaggregatebasesignificantlongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseSignificantLongTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseSignificantLongTermsBucket struct { Buckets BucketsSignificantLongTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasesignificantstringtermsbucket.go b/typedapi/types/multibucketaggregatebasesignificantstringtermsbucket.go index 63c7185920..e17fc618b5 100644 --- a/typedapi/types/multibucketaggregatebasesignificantstringtermsbucket.go +++ b/typedapi/types/multibucketaggregatebasesignificantstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseSignificantStringTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseSignificantStringTermsBucket struct { Buckets BucketsSignificantStringTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasestringraretermsbucket.go b/typedapi/types/multibucketaggregatebasestringraretermsbucket.go index 3622a1f376..69f392cd4f 100644 --- a/typedapi/types/multibucketaggregatebasestringraretermsbucket.go +++ b/typedapi/types/multibucketaggregatebasestringraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseStringRareTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseStringRareTermsBucket struct { Buckets BucketsStringRareTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasestringtermsbucket.go b/typedapi/types/multibucketaggregatebasestringtermsbucket.go index 019c193871..e689cb0c32 100644 --- a/typedapi/types/multibucketaggregatebasestringtermsbucket.go +++ b/typedapi/types/multibucketaggregatebasestringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseStringTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseStringTermsBucket struct { Buckets BucketsStringTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasevariablewidthhistogrambucket.go b/typedapi/types/multibucketaggregatebasevariablewidthhistogrambucket.go index 179b0c03a8..f17f0a5f29 100644 --- a/typedapi/types/multibucketaggregatebasevariablewidthhistogrambucket.go +++ b/typedapi/types/multibucketaggregatebasevariablewidthhistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseVariableWidthHistogramBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseVariableWidthHistogramBucket struct { Buckets BucketsVariableWidthHistogramBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/multibucketaggregatebasevoid.go b/typedapi/types/multibucketaggregatebasevoid.go index 98d574fb98..772e49df2c 100644 --- a/typedapi/types/multibucketaggregatebasevoid.go +++ b/typedapi/types/multibucketaggregatebasevoid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiBucketAggregateBaseVoid type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L327-L329 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L327-L329 type MultiBucketAggregateBaseVoid struct { Buckets BucketsVoid `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -59,13 +59,13 @@ func (s *MultiBucketAggregateBaseVoid) UnmarshalJSON(data []byte) error { localDec := json.NewDecoder(source) switch rawMsg[0] { case '{': - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': - o := []interface{}{} + o := []any{} if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Buckets", err) } diff --git a/typedapi/types/multigeterror.go b/typedapi/types/multigeterror.go index b779a5c6c8..f5b50199f5 100644 --- a/typedapi/types/multigeterror.go +++ b/typedapi/types/multigeterror.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiGetError type. 
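The MultiBucketAggregateBaseVoid hunk above keeps the same branch-on-first-byte approach while moving to `any`: the buckets payload can be either a keyed object or a plain array, so the decoder inspects the leading byte of the raw message before choosing a Go shape. A standalone sketch of that dispatch, under the assumption that untyped `map[string]any` / `[]any` results are acceptable to the caller:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// decodeBuckets mirrors how the generated code handles the Buckets union:
// the first byte of the raw message decides whether to decode a keyed
// object of buckets or a bucket array.
func decodeBuckets(raw json.RawMessage) (any, error) {
	trimmed := bytes.TrimSpace(raw)
	if len(trimmed) == 0 {
		return nil, fmt.Errorf("empty buckets payload")
	}
	dec := json.NewDecoder(bytes.NewReader(trimmed))
	switch trimmed[0] {
	case '{':
		o := make(map[string]any, 0)
		if err := dec.Decode(&o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Buckets", err)
		}
		return o, nil
	case '[':
		o := []any{}
		if err := dec.Decode(&o); err != nil {
			return nil, fmt.Errorf("%s | %w", "Buckets", err)
		}
		return o, nil
	default:
		return nil, fmt.Errorf("unexpected first byte %q", trimmed[0])
	}
}

func main() {
	for _, in := range []string{`{"a":{"doc_count":1}}`, `[{"doc_count":1}]`} {
		v, err := decodeBuckets(json.RawMessage(in))
		if err != nil {
			panic(err)
		}
		fmt.Printf("%s -> %T\n", in, v)
	}
}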
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/mget/types.ts#L62-L66 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/mget/types.ts#L62-L66 type MultiGetError struct { Error ErrorCause `json:"error"` Id_ string `json:"_id"` diff --git a/typedapi/types/multimatchquery.go b/typedapi/types/multimatchquery.go index 5018ba7ae3..4a9e804c35 100644 --- a/typedapi/types/multimatchquery.go +++ b/typedapi/types/multimatchquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // MultiMatchQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L456-L539 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L456-L539 type MultiMatchQuery struct { // Analyzer Analyzer used to convert the text in the query value into tokens. Analyzer *string `json:"analyzer,omitempty"` @@ -116,7 +116,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { s.Analyzer = &o case "auto_generate_synonyms_phrase_query": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -130,7 +130,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { } case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -146,7 +146,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { } case "cutoff_frequency": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -188,7 +188,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { } case "fuzzy_transpositions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -202,7 +202,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { } case "lenient": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -217,7 +217,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case "max_expansions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -243,7 +243,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case "prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -283,7 +283,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { case "slop": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -298,7 +298,7 @@ func (s *MultiMatchQuery) UnmarshalJSON(data []byte) error { } case "tie_breaker": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/multiplexertokenfilter.go b/typedapi/types/multiplexertokenfilter.go index 8e4137e312..8ccffc9ee2 100644 --- a/typedapi/types/multiplexertokenfilter.go +++ b/typedapi/types/multiplexertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiplexerTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L260-L264 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L262-L266 type MultiplexerTokenFilter struct { Filters []string `json:"filters"` PreserveOriginal Stringifiedboolean `json:"preserve_original,omitempty"` diff --git a/typedapi/types/multisearchbody.go b/typedapi/types/multisearchbody.go index 77fe5a74ff..10d89fa247 100644 --- a/typedapi/types/multisearchbody.go +++ b/typedapi/types/multisearchbody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MultisearchBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/msearch/types.ts#L71-L202 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/msearch/types.ts#L70-L201 type MultisearchBody struct { Aggregations map[string]Aggregations `json:"aggregations,omitempty"` Collapse *FieldCollapse `json:"collapse,omitempty"` @@ -55,7 +55,7 @@ type MultisearchBody struct { // IndicesBoost Boosts the _score of documents from specified indices. IndicesBoost []map[string]Float64 `json:"indices_boost,omitempty"` // Knn Defines the approximate kNN search to run. - Knn []KnnQuery `json:"knn,omitempty"` + Knn []KnnSearch `json:"knn,omitempty"` // MinScore Minimum _score for matching documents. Documents with a lower _score are // not included in the search results. 
MinScore *Float64 `json:"min_score,omitempty"` @@ -155,7 +155,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { } case "explain": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -183,7 +183,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case "from": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -211,7 +211,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { rawMsg := json.RawMessage{} dec.Decode(&rawMsg) if !bytes.HasPrefix(rawMsg, []byte("[")) { - o := NewKnnQuery() + o := NewKnnSearch() if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Knn", err) } @@ -224,7 +224,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { } case "min_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -250,7 +250,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { } case "profile": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -303,7 +303,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { } case "seq_no_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -318,7 +318,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -380,7 +380,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { } case "terminate_after": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -407,7 +407,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { s.Timeout = &o case "track_scores": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -426,7 +426,7 @@ func (s *MultisearchBody) UnmarshalJSON(data []byte) error { } case "version": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/multisearchheader.go b/typedapi/types/multisearchheader.go index f5ae2f254b..77ec576eaa 100644 --- a/typedapi/types/multisearchheader.go +++ b/typedapi/types/multisearchheader.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // MultisearchHeader type. 
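In the MultisearchBody hunk above, the `knn` field changes from `[]KnnQuery` to `[]KnnSearch`, and the unmarshaler keeps accepting either a single kNN clause or an array by checking whether the raw value starts with `[`. The sketch below mirrors that single-or-array normalization with a throwaway local type; `knnClause` and its fields are illustrative and are not the client's `KnnSearch`.

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// knnClause is a local stand-in declaring only what the sketch needs.
type knnClause struct {
	Field string `json:"field"`
	K     int    `json:"k,omitempty"`
}

// decodeKnn mirrors the generated handling of the `knn` key: a single object
// is wrapped into a one-element slice so the Go field is always a slice.
func decodeKnn(raw json.RawMessage) ([]knnClause, error) {
	if !bytes.HasPrefix(bytes.TrimSpace(raw), []byte("[")) {
		var one knnClause
		if err := json.Unmarshal(raw, &one); err != nil {
			return nil, fmt.Errorf("%s | %w", "Knn", err)
		}
		return []knnClause{one}, nil
	}
	var many []knnClause
	if err := json.Unmarshal(raw, &many); err != nil {
		return nil, fmt.Errorf("%s | %w", "Knn", err)
	}
	return many, nil
}

func main() {
	single := json.RawMessage(`{"field":"embedding","k":5}`)
	list := json.RawMessage(`[{"field":"embedding","k":5},{"field":"title_vec"}]`)
	for _, raw := range []json.RawMessage{single, list} {
		out, err := decodeKnn(raw)
		if err != nil {
			panic(err)
		}
		fmt.Println(len(out), "clause(s)")
	}
}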
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/msearch/types.ts#L53-L68 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/msearch/types.ts#L52-L67 type MultisearchHeader struct { AllowNoIndices *bool `json:"allow_no_indices,omitempty"` AllowPartialSearchResults *bool `json:"allow_partial_search_results,omitempty"` @@ -65,7 +65,7 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { switch t { case "allow_no_indices": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -79,7 +79,7 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { } case "allow_partial_search_results": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -93,7 +93,7 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { } case "ccs_minimize_roundtrips": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -123,7 +123,7 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { } case "ignore_throttled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -137,7 +137,7 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { } case "ignore_unavailable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -179,7 +179,7 @@ func (s *MultisearchHeader) UnmarshalJSON(data []byte) error { s.Preference = &o case "request_cache": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/multisearchitem.go b/typedapi/types/multisearchitem.go index bcb6eb5863..6871981245 100644 --- a/typedapi/types/multisearchitem.go +++ b/typedapi/types/multisearchitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // MultiSearchItem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/msearch/types.ts#L214-L217 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/msearch/types.ts#L213-L216 type MultiSearchItem struct { Aggregations map[string]Aggregate `json:"aggregations,omitempty"` Clusters_ *ClusterStatistics `json:"_clusters,omitempty"` @@ -522,7 +522,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -572,7 +572,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -582,7 +582,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -610,7 +610,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { } case "max_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -626,7 +626,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { } case "num_reduce_phases": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -662,7 +662,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { case "status": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -720,7 +720,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -730,7 +730,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Suggest") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -740,7 +740,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { } case "terminated_early": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -754,7 +754,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { } case "timed_out": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -768,7 +768,7 @@ func (s *MultiSearchItem) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/multisearchresult.go b/typedapi/types/multisearchresult.go index 21dc549e7e..ce8f2802fe 100644 --- a/typedapi/types/multisearchresult.go +++ b/typedapi/types/multisearchresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MultiSearchResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/msearch/types.ts#L204-L207 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/msearch/types.ts#L203-L206 type MultiSearchResult struct { Responses []MsearchResponseItem `json:"responses"` Took int64 `json:"took"` @@ -94,7 +94,7 @@ func (s *MultiSearchResult) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/multitermlookup.go b/typedapi/types/multitermlookup.go index a4304768f9..0900eef742 100644 --- a/typedapi/types/multitermlookup.go +++ b/typedapi/types/multitermlookup.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // MultiTermLookup type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L624-L634 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L626-L636 type MultiTermLookup struct { // Field A fields from which to retrieve terms. Field string `json:"field"` diff --git a/typedapi/types/multitermsaggregate.go b/typedapi/types/multitermsaggregate.go index 3e4ca9f6bc..c214e167e9 100644 --- a/typedapi/types/multitermsaggregate.go +++ b/typedapi/types/multitermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MultiTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L461-L463 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L463-L465 type MultiTermsAggregate struct { Buckets BucketsMultiTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -76,7 +76,7 @@ func (s *MultiTermsAggregate) UnmarshalJSON(data []byte) error { } case "doc_count_error_upper_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *MultiTermsAggregate) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/multitermsaggregation.go b/typedapi/types/multitermsaggregation.go index 63d93ffca6..0549338f1c 100644 --- a/typedapi/types/multitermsaggregation.go +++ b/typedapi/types/multitermsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,14 +34,12 @@ import ( // MultiTermsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L582-L622 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L584-L624 type MultiTermsAggregation struct { // CollectMode Specifies the strategy for data collection. CollectMode *termsaggregationcollectmode.TermsAggregationCollectMode `json:"collect_mode,omitempty"` - Meta Metadata `json:"meta,omitempty"` // MinDocCount The minimum number of documents in a bucket for it to be returned. - MinDocCount *int64 `json:"min_doc_count,omitempty"` - Name *string `json:"name,omitempty"` + MinDocCount *int64 `json:"min_doc_count,omitempty"` // Order Specifies the sort order of the buckets. // Defaults to sorting by descending document count. 
Order AggregateOrder `json:"order,omitempty"` @@ -80,13 +78,8 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "CollectMode", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,18 +93,6 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { s.MinDocCount = &f } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "order": rawMsg := json.RawMessage{} @@ -134,7 +115,7 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { } case "shard_min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +131,7 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -165,7 +146,7 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { } case "show_term_doc_count_error": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -180,7 +161,7 @@ func (s *MultiTermsAggregation) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/multitermsbucket.go b/typedapi/types/multitermsbucket.go index 5eb8254561..379f620ba2 100644 --- a/typedapi/types/multitermsbucket.go +++ b/typedapi/types/multitermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // MultiTermsBucket type. 
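The MultiTermsAggregation hunk above drops the `meta` and `name` members from the request type, leaving only the aggregation body itself (`terms`, `min_doc_count`, `order`, and so on). A hand-rolled sketch of the JSON such a request carries, using local stand-in structs whose names and field set are illustrative only:

package main

import (
	"encoding/json"
	"fmt"
	"os"
)

// multiTermLookup and multiTermsAggregation follow the JSON tags shown in
// the generated structs but are illustration-only, not the client's types.
type multiTermLookup struct {
	Field string `json:"field"`
}

type multiTermsAggregation struct {
	MinDocCount *int64            `json:"min_doc_count,omitempty"`
	Terms       []multiTermLookup `json:"terms"`
}

func main() {
	minDocs := int64(2)
	body := map[string]any{
		"genres_and_products": map[string]any{
			"multi_terms": multiTermsAggregation{
				MinDocCount: &minDocs,
				Terms: []multiTermLookup{
					{Field: "genre"},
					{Field: "product"},
				},
			},
		},
	}
	enc := json.NewEncoder(os.Stdout)
	enc.SetIndent("", "  ")
	if err := enc.Encode(body); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}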
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L465-L469 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L467-L471 type MultiTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -57,7 +57,7 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { } case "doc_count_error_upper_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -548,7 +548,7 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -598,7 +598,7 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -608,7 +608,7 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -625,7 +625,7 @@ func (s *MultiTermsBucket) UnmarshalJSON(data []byte) error { func (s MultiTermsBucket) MarshalJSON() ([]byte, error) { type opt MultiTermsBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/murmur3hashproperty.go b/typedapi/types/murmur3hashproperty.go index 8ef8c4df7d..faf31e60c4 100644 --- a/typedapi/types/murmur3hashproperty.go +++ b/typedapi/types/murmur3hashproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // Murmur3HashProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/specialized.ts#L75-L77 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/specialized.ts#L81-L83 type Murmur3HashProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -80,7 +80,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -105,7 +105,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -126,7 +126,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -216,12 +216,6 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -240,6 +234,18 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -396,6 +402,12 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -407,7 +419,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -436,7 +448,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -457,7 +469,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -547,12 +559,6 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := 
localDec.Decode(&oo); err != nil { @@ -571,6 +577,18 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -727,6 +745,12 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -749,7 +773,7 @@ func (s *Murmur3HashProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/mutualinformationheuristic.go b/typedapi/types/mutualinformationheuristic.go index 0c959e7393..0113897204 100644 --- a/typedapi/types/mutualinformationheuristic.go +++ b/typedapi/types/mutualinformationheuristic.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // MutualInformationHeuristic type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L753-L762 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L755-L764 type MutualInformationHeuristic struct { // BackgroundIsSuperset Set to `false` if you defined a custom background filter that represents a // different set of documents that you want to compare to. @@ -57,7 +57,7 @@ func (s *MutualInformationHeuristic) UnmarshalJSON(data []byte) error { switch t { case "background_is_superset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *MutualInformationHeuristic) UnmarshalJSON(data []byte) error { } case "include_negatives": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/names.go b/typedapi/types/names.go index 5149792ed0..bc8c7bdd80 100644 --- a/typedapi/types/names.go +++ b/typedapi/types/names.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Names type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L81-L81 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L81-L81 type Names []string diff --git a/typedapi/types/nativecode.go b/typedapi/types/nativecode.go index 46ebe1c003..6bcd558c07 100644 --- a/typedapi/types/nativecode.go +++ b/typedapi/types/nativecode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NativeCode type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/info/types.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/info/types.ts#L29-L32 type NativeCode struct { BuildHash string `json:"build_hash"` Version string `json:"version"` diff --git a/typedapi/types/nativecodeinformation.go b/typedapi/types/nativecodeinformation.go index 2cab5f3c78..f51ede03ad 100644 --- a/typedapi/types/nativecodeinformation.go +++ b/typedapi/types/nativecodeinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NativeCodeInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/info/types.ts#L29-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/info/types.ts#L29-L32 type NativeCodeInformation struct { BuildHash string `json:"build_hash"` Version string `json:"version"` diff --git a/typedapi/types/nerinferenceoptions.go b/typedapi/types/nerinferenceoptions.go index 39a1df7bb1..17f76fbf22 100644 --- a/typedapi/types/nerinferenceoptions.go +++ b/typedapi/types/nerinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NerInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L255-L264 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L255-L264 type NerInferenceOptions struct { // ClassificationLabels The token classification labels. 
Must be IOB formatted tags ClassificationLabels []string `json:"classification_labels,omitempty"` diff --git a/typedapi/types/nerinferenceupdateoptions.go b/typedapi/types/nerinferenceupdateoptions.go index a98585a468..9ba95cf675 100644 --- a/typedapi/types/nerinferenceupdateoptions.go +++ b/typedapi/types/nerinferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NerInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L404-L409 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L404-L409 type NerInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. diff --git a/typedapi/types/nestedaggregate.go b/typedapi/types/nestedaggregate.go index 52f2d3b15f..9e90d98f56 100644 --- a/typedapi/types/nestedaggregate.go +++ b/typedapi/types/nestedaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // NestedAggregate type. 
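// A minimal, self-contained sketch (not the generated code itself) of the
// aggregate dispatch touched in the hunks above: interface{} becomes its
// Go 1.18 alias `any` with no behavioural change, and boxplot aggregates are
// now keyed as "boxplot" rather than "box_plot". The typed_keys key form
// "kind#name" and the BoxPlotAggregate stand-in are assumptions for
// illustration, inferred from the elems[1] lookups visible in the diff.
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// BoxPlotAggregate is a trimmed stand-in for the generated type.
type BoxPlotAggregate struct {
	Min float64 `json:"min"`
	Max float64 `json:"max"`
}

func decodeAggregate(key string, raw json.RawMessage) (string, any, error) {
	elems := strings.SplitN(key, "#", 2)
	if len(elems) != 2 {
		return key, nil, fmt.Errorf("unexpected aggregation key %q", key)
	}
	kind, name := elems[0], elems[1]
	switch kind {
	case "boxplot": // renamed from "box_plot" in this update
		var o BoxPlotAggregate
		return name, o, json.Unmarshal(raw, &o)
	default: // unknown kinds fall back to a plain map, as in the generated default case
		o := make(map[string]any)
		return name, o, json.Unmarshal(raw, &o)
	}
}

func main() {
	name, agg, err := decodeAggregate("boxplot#load_time", json.RawMessage(`{"min":1,"max":9}`))
	fmt.Println(name, agg, err)
}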
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L486-L487 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L490-L491 type NestedAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *NestedAggregate) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *NestedAggregate) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *NestedAggregate) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *NestedAggregate) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *NestedAggregate) UnmarshalJSON(data []byte) error { func (s NestedAggregate) MarshalJSON() ([]byte, error) { type opt NestedAggregate // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/nestedaggregation.go b/typedapi/types/nestedaggregation.go index 6f54f337f6..a8e610fa93 100644 --- a/typedapi/types/nestedaggregation.go +++ b/typedapi/types/nestedaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,15 +26,12 @@ import ( "errors" "fmt" "io" - "strconv" ) // NestedAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L636-L641 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L638-L643 type NestedAggregation struct { - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // Path The path to the field of type `nested`. 
Path *string `json:"path,omitempty"` } @@ -54,23 +51,6 @@ func (s *NestedAggregation) UnmarshalJSON(data []byte) error { switch t { - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "path": if err := dec.Decode(&s.Path); err != nil { return fmt.Errorf("%s | %w", "Path", err) diff --git a/typedapi/types/nestedidentity.go b/typedapi/types/nestedidentity.go index cb01cbbc5f..ba660973a1 100644 --- a/typedapi/types/nestedidentity.go +++ b/typedapi/types/nestedidentity.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NestedIdentity type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/hits.ts#L88-L92 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/hits.ts#L88-L92 type NestedIdentity struct { Field string `json:"field"` Nested_ *NestedIdentity `json:"_nested,omitempty"` @@ -65,7 +65,7 @@ func (s *NestedIdentity) UnmarshalJSON(data []byte) error { case "offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nestedproperty.go b/typedapi/types/nestedproperty.go index b1ced1bc1e..8ca9c631bb 100644 --- a/typedapi/types/nestedproperty.go +++ b/typedapi/types/nestedproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // NestedProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/complex.ts#L39-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/complex.ts#L39-L44 type NestedProperty struct { CopyTo []string `json:"copy_to,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` @@ -87,7 +87,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -107,7 +107,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -128,7 +128,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -218,12 +218,6 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -242,6 +236,18 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -398,6 +404,12 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -409,7 +421,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -424,7 +436,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { } case "include_in_parent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -438,7 +450,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { } case "include_in_root": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -466,7 +478,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -487,7 +499,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != 
nil { return err @@ -577,12 +589,6 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -601,6 +607,18 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -757,6 +775,12 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -779,7 +803,7 @@ func (s *NestedProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nestedquery.go b/typedapi/types/nestedquery.go index 512dcba159..1ae519f301 100644 --- a/typedapi/types/nestedquery.go +++ b/typedapi/types/nestedquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // NestedQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/joining.ts#L106-L130 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/joining.ts#L106-L130 type NestedQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -72,7 +72,7 @@ func (s *NestedQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *NestedQuery) UnmarshalJSON(data []byte) error { } case "ignore_unmapped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nestedsortvalue.go b/typedapi/types/nestedsortvalue.go index c1c7f40c31..3147bb19b2 100644 --- a/typedapi/types/nestedsortvalue.go +++ b/typedapi/types/nestedsortvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NestedSortValue type. 
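// A minimal sketch (not the generated code itself) of the Fields/Properties
// decoding shown in the Murmur3HashProperty and NestedProperty hunks above:
// each raw property is peeked at for its "type" discriminator, then decoded
// into a concrete struct. The switch now also recognises "semantic_text" and
// "icu_collation_keyword", and the dynamic-property sentinel is
// "{dynamic_type}" instead of "{dynamic_property}". SemanticTextProperty here
// is an illustrative stand-in, and the generated default case decodes into
// the Property union rather than a map.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

type SemanticTextProperty struct {
	Type        string `json:"type"`
	InferenceID string `json:"inference_id,omitempty"` // illustrative field
}

func decodeProperty(message json.RawMessage) (any, error) {
	// Peek at the discriminator, mirroring the kind := make(map[string]any) step.
	kind := make(map[string]any)
	if err := json.NewDecoder(bytes.NewReader(message)).Decode(&kind); err != nil {
		return nil, err
	}
	switch kind["type"] {
	case "semantic_text": // one of the new cases in the 8.15 generated switch
		var p SemanticTextProperty
		return p, json.Unmarshal(message, &p)
	default:
		p := make(map[string]any)
		return p, json.Unmarshal(message, &p)
	}
}

func main() {
	p, err := decodeProperty(json.RawMessage(`{"type":"semantic_text","inference_id":"my-endpoint"}`))
	fmt.Println(p, err)
}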
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/sort.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/sort.ts#L30-L35 type NestedSortValue struct { Filter *Query `json:"filter,omitempty"` MaxChildren *int `json:"max_children,omitempty"` @@ -61,7 +61,7 @@ func (s *NestedSortValue) UnmarshalJSON(data []byte) error { case "max_children": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nevercondition.go b/typedapi/types/nevercondition.go index 3cc27cb863..1e49757e44 100644 --- a/typedapi/types/nevercondition.go +++ b/typedapi/types/nevercondition.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NeverCondition type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Conditions.ts#L69-L69 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Conditions.ts#L72-L72 type NeverCondition struct { } diff --git a/typedapi/types/ngramtokenfilter.go b/typedapi/types/ngramtokenfilter.go index a247779d6e..ebb50aac33 100644 --- a/typedapi/types/ngramtokenfilter.go +++ b/typedapi/types/ngramtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NGramTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L266-L271 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L268-L273 type NGramTokenFilter struct { MaxGram *int `json:"max_gram,omitempty"` MinGram *int `json:"min_gram,omitempty"` @@ -57,7 +57,7 @@ func (s *NGramTokenFilter) UnmarshalJSON(data []byte) error { case "max_gram": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -73,7 +73,7 @@ func (s *NGramTokenFilter) UnmarshalJSON(data []byte) error { case "min_gram": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/ngramtokenizer.go b/typedapi/types/ngramtokenizer.go index 82d49ef94b..4794417375 100644 --- a/typedapi/types/ngramtokenizer.go +++ b/typedapi/types/ngramtokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // NGramTokenizer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L39-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L39-L45 type NGramTokenizer struct { CustomTokenChars *string `json:"custom_token_chars,omitempty"` MaxGram int `json:"max_gram"` @@ -72,7 +72,7 @@ func (s *NGramTokenizer) UnmarshalJSON(data []byte) error { case "max_gram": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *NGramTokenizer) UnmarshalJSON(data []byte) error { case "min_gram": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nlpberttokenizationconfig.go b/typedapi/types/nlpberttokenizationconfig.go index d101346578..b229e8e1d0 100644 --- a/typedapi/types/nlpberttokenizationconfig.go +++ b/typedapi/types/nlpberttokenizationconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // NlpBertTokenizationConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L131-L158 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L131-L158 type NlpBertTokenizationConfig struct { // DoLowerCase Should the tokenizer lower case the text DoLowerCase *bool `json:"do_lower_case,omitempty"` @@ -65,7 +65,7 @@ func (s *NlpBertTokenizationConfig) UnmarshalJSON(data []byte) error { switch t { case "do_lower_case": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *NlpBertTokenizationConfig) UnmarshalJSON(data []byte) error { case "max_sequence_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *NlpBertTokenizationConfig) UnmarshalJSON(data []byte) error { case "span": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -116,7 +116,7 @@ func (s *NlpBertTokenizationConfig) UnmarshalJSON(data []byte) error { } case "with_special_tokens": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nlprobertatokenizationconfig.go b/typedapi/types/nlprobertatokenizationconfig.go index c793ddf6a5..7ea49e00d8 100644 --- a/typedapi/types/nlprobertatokenizationconfig.go +++ b/typedapi/types/nlprobertatokenizationconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // NlpRobertaTokenizationConfig type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L160-L187 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L160-L187 type NlpRobertaTokenizationConfig struct { // AddPrefixSpace Should the tokenizer prefix input with a space character AddPrefixSpace *bool `json:"add_prefix_space,omitempty"` @@ -65,7 +65,7 @@ func (s *NlpRobertaTokenizationConfig) UnmarshalJSON(data []byte) error { switch t { case "add_prefix_space": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *NlpRobertaTokenizationConfig) UnmarshalJSON(data []byte) error { case "max_sequence_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *NlpRobertaTokenizationConfig) UnmarshalJSON(data []byte) error { case "span": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -116,7 +116,7 @@ func (s *NlpRobertaTokenizationConfig) UnmarshalJSON(data []byte) error { } case "with_special_tokens": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nlptokenizationupdateoptions.go b/typedapi/types/nlptokenizationupdateoptions.go index 1b26d3866d..605259c401 100644 --- a/typedapi/types/nlptokenizationupdateoptions.go +++ b/typedapi/types/nlptokenizationupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // NlpTokenizationUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L356-L361 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L356-L361 type NlpTokenizationUpdateOptions struct { // Span Span options to apply Span *int `json:"span,omitempty"` @@ -58,7 +58,7 @@ func (s *NlpTokenizationUpdateOptions) UnmarshalJSON(data []byte) error { case "span": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/node.go b/typedapi/types/node.go index d315a1fd61..1049072099 100644 --- a/typedapi/types/node.go +++ b/typedapi/types/node.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Node type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/searchable_snapshots/cache_stats/Response.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/searchable_snapshots/cache_stats/Response.ts#L30-L32 type Node struct { SharedCache Shared `json:"shared_cache"` } diff --git a/typedapi/types/nodeallocationexplanation.go b/typedapi/types/nodeallocationexplanation.go index 0975d3b9df..0f26836bdc 100644 --- a/typedapi/types/nodeallocationexplanation.go +++ b/typedapi/types/nodeallocationexplanation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // NodeAllocationExplanation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/types.ts#L97-L106 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/types.ts#L97-L106 type NodeAllocationExplanation struct { Deciders []AllocationDecision `json:"deciders"` NodeAttributes map[string]string `json:"node_attributes"` @@ -100,7 +100,7 @@ func (s *NodeAllocationExplanation) UnmarshalJSON(data []byte) error { case "weight_ranking": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodeattributes.go b/typedapi/types/nodeattributes.go index 61fb05235b..498e4e14bf 100644 --- a/typedapi/types/nodeattributes.go +++ b/typedapi/types/nodeattributes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // NodeAttributes type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Node.ts#L41-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Node.ts#L41-L58 type NodeAttributes struct { // Attributes Lists node attributes. Attributes map[string]string `json:"attributes"` diff --git a/typedapi/types/nodeattributesrecord.go b/typedapi/types/nodeattributesrecord.go index 11a6ad8678..c7f824e28f 100644 --- a/typedapi/types/nodeattributesrecord.go +++ b/typedapi/types/nodeattributesrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeAttributesRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/nodeattrs/types.ts#L20-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/nodeattrs/types.ts#L20-L55 type NodeAttributesRecord struct { // Attr The attribute name. Attr *string `json:"attr,omitempty"` diff --git a/typedapi/types/nodebufferpool.go b/typedapi/types/nodebufferpool.go index 330940ddb9..de723cb947 100644 --- a/typedapi/types/nodebufferpool.go +++ b/typedapi/types/nodebufferpool.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeBufferPool type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L788-L809 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L788-L809 type NodeBufferPool struct { // Count Number of buffer pools. Count *int64 `json:"count,omitempty"` @@ -61,7 +61,7 @@ func (s *NodeBufferPool) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *NodeBufferPool) UnmarshalJSON(data []byte) error { s.TotalCapacity = &o case "total_capacity_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *NodeBufferPool) UnmarshalJSON(data []byte) error { s.Used = &o case "used_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodediskusage.go b/typedapi/types/nodediskusage.go index d2256dcbfa..c8d88a6e8b 100644 --- a/typedapi/types/nodediskusage.go +++ b/typedapi/types/nodediskusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // NodeDiskUsage type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/types.ts#L56-L60 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/types.ts#L56-L60 type NodeDiskUsage struct { LeastAvailable DiskUsage `json:"least_available"` MostAvailable DiskUsage `json:"most_available"` diff --git a/typedapi/types/nodeids.go b/typedapi/types/nodeids.go index 6860e37d28..5e74bb585c 100644 --- a/typedapi/types/nodeids.go +++ b/typedapi/types/nodeids.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodeIds type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L64-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L64-L64 type NodeIds []string diff --git a/typedapi/types/nodeinfo.go b/typedapi/types/nodeinfo.go index 35786fd162..0132eb4dae 100644 --- a/typedapi/types/nodeinfo.go +++ b/typedapi/types/nodeinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // NodeInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L31-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L31-L67 type NodeInfo struct { Aggregations map[string]NodeInfoAggregation `json:"aggregations,omitempty"` Attributes map[string]string `json:"attributes"` @@ -212,7 +212,7 @@ func (s *NodeInfo) UnmarshalJSON(data []byte) error { } case "total_indexing_buffer": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodeinfoaction.go b/typedapi/types/nodeinfoaction.go index 7eabfcf20b..b416ac0348 100644 --- a/typedapi/types/nodeinfoaction.go +++ b/typedapi/types/nodeinfoaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L181-L183 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L184-L186 type NodeInfoAction struct { DestructiveRequiresName string `json:"destructive_requires_name"` } diff --git a/typedapi/types/nodeinfoaggregation.go b/typedapi/types/nodeinfoaggregation.go index 2476d41748..890da69d61 100644 --- a/typedapi/types/nodeinfoaggregation.go +++ b/typedapi/types/nodeinfoaggregation.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodeInfoAggregation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L232-L234 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L235-L237 type NodeInfoAggregation struct { Types []string `json:"types"` } diff --git a/typedapi/types/nodeinfobootstrap.go b/typedapi/types/nodeinfobootstrap.go index 6b506c583c..aec514c546 100644 --- a/typedapi/types/nodeinfobootstrap.go +++ b/typedapi/types/nodeinfobootstrap.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoBootstrap type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L201-L203 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L204-L206 type NodeInfoBootstrap struct { MemoryLock string `json:"memory_lock"` } diff --git a/typedapi/types/nodeinfoclient.go b/typedapi/types/nodeinfoclient.go index 435d2fa1d5..042df8f82a 100644 --- a/typedapi/types/nodeinfoclient.go +++ b/typedapi/types/nodeinfoclient.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoClient type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L185-L187 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L188-L190 type NodeInfoClient struct { Type string `json:"type"` } diff --git a/typedapi/types/nodeinfodiscover.go b/typedapi/types/nodeinfodiscover.go index 9bfdbd2a1b..f9bfd5f927 100644 --- a/typedapi/types/nodeinfodiscover.go +++ b/typedapi/types/nodeinfodiscover.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoDiscover type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L173-L179 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L173-L182 type NodeInfoDiscover struct { NodeInfoDiscover map[string]json.RawMessage `json:"-"` SeedHosts []string `json:"seed_hosts,omitempty"` @@ -98,7 +98,7 @@ func (s *NodeInfoDiscover) UnmarshalJSON(data []byte) error { func (s NodeInfoDiscover) MarshalJSON() ([]byte, error) { type opt NodeInfoDiscover // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/nodeinfohttp.go b/typedapi/types/nodeinfohttp.go index 65164c92ff..620983520e 100644 --- a/typedapi/types/nodeinfohttp.go +++ b/typedapi/types/nodeinfohttp.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoHttp type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L303-L308 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L306-L311 type NodeInfoHttp struct { BoundAddress []string `json:"bound_address"` MaxContentLength ByteSize `json:"max_content_length,omitempty"` @@ -65,7 +65,7 @@ func (s *NodeInfoHttp) UnmarshalJSON(data []byte) error { } case "max_content_length_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodeinfoingest.go b/typedapi/types/nodeinfoingest.go index cec447d8c6..5b5475d326 100644 --- a/typedapi/types/nodeinfoingest.go +++ b/typedapi/types/nodeinfoingest.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodeInfoIngest type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L224-L226 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L227-L229 type NodeInfoIngest struct { Processors []NodeInfoIngestProcessor `json:"processors"` } diff --git a/typedapi/types/nodeinfoingestdownloader.go b/typedapi/types/nodeinfoingestdownloader.go index 8a993b72ef..0daac40111 100644 --- a/typedapi/types/nodeinfoingestdownloader.go +++ b/typedapi/types/nodeinfoingestdownloader.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoIngestDownloader type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L128-L130 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L128-L130 type NodeInfoIngestDownloader struct { Enabled string `json:"enabled"` } diff --git a/typedapi/types/nodeinfoingestinfo.go b/typedapi/types/nodeinfoingestinfo.go index ced1f6959a..aba085f160 100644 --- a/typedapi/types/nodeinfoingestinfo.go +++ b/typedapi/types/nodeinfoingestinfo.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodeInfoIngestInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L124-L126 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L124-L126 type NodeInfoIngestInfo struct { Downloader NodeInfoIngestDownloader `json:"downloader"` } diff --git a/typedapi/types/nodeinfoingestprocessor.go b/typedapi/types/nodeinfoingestprocessor.go index 3f92996b92..31178bbbd3 100644 --- a/typedapi/types/nodeinfoingestprocessor.go +++ b/typedapi/types/nodeinfoingestprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoIngestProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L228-L230 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L231-L233 type NodeInfoIngestProcessor struct { Type string `json:"type"` } diff --git a/typedapi/types/nodeinfojvmmemory.go b/typedapi/types/nodeinfojvmmemory.go index 78818f1af7..669ab8ea5c 100644 --- a/typedapi/types/nodeinfojvmmemory.go +++ b/typedapi/types/nodeinfojvmmemory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoJvmMemory type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L310-L321 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L313-L324 type NodeInfoJvmMemory struct { DirectMax ByteSize `json:"direct_max,omitempty"` DirectMaxInBytes int64 `json:"direct_max_in_bytes"` @@ -66,7 +66,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { } case "direct_max_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -86,7 +86,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { } case "heap_init_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,7 +106,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { } case "heap_max_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -126,7 +126,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { } case "non_heap_init_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -146,7 +146,7 @@ func (s *NodeInfoJvmMemory) UnmarshalJSON(data []byte) error { } case "non_heap_max_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodeinfomemory.go b/typedapi/types/nodeinfomemory.go index c93a6a75a1..cab31fe291 100644 --- a/typedapi/types/nodeinfomemory.go +++ b/typedapi/types/nodeinfomemory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L323-L326 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L326-L329 type NodeInfoMemory struct { Total string `json:"total"` TotalInBytes int64 `json:"total_in_bytes"` @@ -65,7 +65,7 @@ func (s *NodeInfoMemory) UnmarshalJSON(data []byte) error { s.Total = o case "total_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodeinfonetwork.go b/typedapi/types/nodeinfonetwork.go index d7856d61f9..904ca1b93f 100644 --- a/typedapi/types/nodeinfonetwork.go +++ b/typedapi/types/nodeinfonetwork.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoNetwork type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L328-L331 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L331-L334 type NodeInfoNetwork struct { PrimaryInterface NodeInfoNetworkInterface `json:"primary_interface"` RefreshInterval int `json:"refresh_interval"` @@ -59,7 +59,7 @@ func (s *NodeInfoNetwork) UnmarshalJSON(data []byte) error { case "refresh_interval": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodeinfonetworkinterface.go b/typedapi/types/nodeinfonetworkinterface.go index a88b0687e8..6337a8fc35 100644 --- a/typedapi/types/nodeinfonetworkinterface.go +++ b/typedapi/types/nodeinfonetworkinterface.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoNetworkInterface type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L333-L337 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L336-L340 type NodeInfoNetworkInterface struct { Address string `json:"address"` MacAddress string `json:"mac_address"` diff --git a/typedapi/types/nodeinfooscpu.go b/typedapi/types/nodeinfooscpu.go index e4f04f067a..c51239af6a 100644 --- a/typedapi/types/nodeinfooscpu.go +++ b/typedapi/types/nodeinfooscpu.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoOSCPU type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L339-L348 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L342-L351 type NodeInfoOSCPU struct { CacheSize string `json:"cache_size"` CacheSizeInBytes int `json:"cache_size_in_bytes"` @@ -72,7 +72,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case "cache_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case "cores_per_socket": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case "mhz": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -132,7 +132,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case "total_cores": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -148,7 +148,7 @@ func (s *NodeInfoOSCPU) UnmarshalJSON(data []byte) error { case "total_sockets": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodeinfopath.go b/typedapi/types/nodeinfopath.go index ecbb84ad0a..c49809e9ae 100644 --- a/typedapi/types/nodeinfopath.go +++ b/typedapi/types/nodeinfopath.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoPath type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L158-L163 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L158-L163 type NodeInfoPath struct { Data []string `json:"data,omitempty"` Home *string `json:"home,omitempty"` diff --git a/typedapi/types/nodeinforepositories.go b/typedapi/types/nodeinforepositories.go index b5df6acd7d..9b92eefdcb 100644 --- a/typedapi/types/nodeinforepositories.go +++ b/typedapi/types/nodeinforepositories.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodeInfoRepositories type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L165-L167 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L165-L167 type NodeInfoRepositories struct { Url NodeInfoRepositoriesUrl `json:"url"` } diff --git a/typedapi/types/nodeinforepositoriesurl.go b/typedapi/types/nodeinforepositoriesurl.go index 9e42aa8fe9..6e1c461f6e 100644 --- a/typedapi/types/nodeinforepositoriesurl.go +++ b/typedapi/types/nodeinforepositoriesurl.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoRepositoriesUrl type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L169-L171 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L169-L171 type NodeInfoRepositoriesUrl struct { AllowedUrls string `json:"allowed_urls"` } diff --git a/typedapi/types/nodeinfoscript.go b/typedapi/types/nodeinfoscript.go index 724c8c9d21..381bfa0a7d 100644 --- a/typedapi/types/nodeinfoscript.go +++ b/typedapi/types/nodeinfoscript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,10 +31,10 @@ import ( // NodeInfoScript type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L281-L284 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L284-L287 type NodeInfoScript struct { - AllowedTypes string `json:"allowed_types"` - DisableMaxCompilationsRate string `json:"disable_max_compilations_rate"` + AllowedTypes string `json:"allowed_types"` + DisableMaxCompilationsRate *string `json:"disable_max_compilations_rate,omitempty"` } func (s *NodeInfoScript) UnmarshalJSON(data []byte) error { @@ -74,7 +74,7 @@ func (s *NodeInfoScript) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.DisableMaxCompilationsRate = o + s.DisableMaxCompilationsRate = &o } } diff --git a/typedapi/types/nodeinfosearch.go b/typedapi/types/nodeinfosearch.go index 153fca33e4..a1567f4997 100644 --- a/typedapi/types/nodeinfosearch.go +++ b/typedapi/types/nodeinfosearch.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodeInfoSearch type. 
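The NodeInfoScript hunk above turns DisableMaxCompilationsRate into an optional *string with omitempty (the same pointer treatment given to NodeInfoSettings.Client, NodeInfoSettingsNetwork.Host, and NodeShard.RelocatingNode further down in this diff), so callers can no longer read the field without a nil check. A minimal sketch of the defensive read, assuming the v8 module import path; describeScript is a hypothetical helper, not part of the generated API:

package example

import (
    "fmt"

    "github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// describeScript reads the now-optional disable_max_compilations_rate setting
// defensively: the pointer is nil when the server omits the value.
func describeScript(s types.NodeInfoScript) string {
    rate := "unset"
    if s.DisableMaxCompilationsRate != nil {
        rate = *s.DisableMaxCompilationsRate
    }
    return fmt.Sprintf("allowed_types=%q disable_max_compilations_rate=%q", s.AllowedTypes, rate)
}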
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L286-L288 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L289-L291 type NodeInfoSearch struct { Remote NodeInfoSearchRemote `json:"remote"` } diff --git a/typedapi/types/nodeinfosearchremote.go b/typedapi/types/nodeinfosearchremote.go index be26faf713..f2b5362e1a 100644 --- a/typedapi/types/nodeinfosearchremote.go +++ b/typedapi/types/nodeinfosearchremote.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoSearchRemote type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L290-L292 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L293-L295 type NodeInfoSearchRemote struct { Connect string `json:"connect"` } diff --git a/typedapi/types/nodeinfosettings.go b/typedapi/types/nodeinfosettings.go index 064359e264..89d2662319 100644 --- a/typedapi/types/nodeinfosettings.go +++ b/typedapi/types/nodeinfosettings.go @@ -16,17 +16,17 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodeInfoSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L69-L85 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L69-L85 type NodeInfoSettings struct { Action *NodeInfoAction `json:"action,omitempty"` Bootstrap *NodeInfoBootstrap `json:"bootstrap,omitempty"` - Client NodeInfoClient `json:"client"` + Client *NodeInfoClient `json:"client,omitempty"` Cluster NodeInfoSettingsCluster `json:"cluster"` Discovery *NodeInfoDiscover `json:"discovery,omitempty"` Http NodeInfoSettingsHttp `json:"http"` diff --git a/typedapi/types/nodeinfosettingscluster.go b/typedapi/types/nodeinfosettingscluster.go index 7e3c3f84cf..30f9e8f110 100644 --- a/typedapi/types/nodeinfosettingscluster.go +++ b/typedapi/types/nodeinfosettingscluster.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // NodeInfoSettingsCluster type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L132-L142 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L132-L142 type NodeInfoSettingsCluster struct { DeprecationIndexing *DeprecationIndexing `json:"deprecation_indexing,omitempty"` Election NodeInfoSettingsClusterElection `json:"election"` diff --git a/typedapi/types/nodeinfosettingsclusterelection.go b/typedapi/types/nodeinfosettingsclusterelection.go index 50cb2f15c5..3d2ff425c1 100644 --- a/typedapi/types/nodeinfosettingsclusterelection.go +++ b/typedapi/types/nodeinfosettingsclusterelection.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // NodeInfoSettingsClusterElection type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L148-L150 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L148-L150 type NodeInfoSettingsClusterElection struct { Strategy string `json:"strategy"` } diff --git a/typedapi/types/nodeinfosettingshttp.go b/typedapi/types/nodeinfosettingshttp.go index da9a9c673c..9a61273c8f 100644 --- a/typedapi/types/nodeinfosettingshttp.go +++ b/typedapi/types/nodeinfosettingshttp.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoSettingsHttp type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L189-L194 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L192-L197 type NodeInfoSettingsHttp struct { Compression string `json:"compression,omitempty"` Port string `json:"port,omitempty"` diff --git a/typedapi/types/nodeinfosettingshttptype.go b/typedapi/types/nodeinfosettingshttptype.go index 84fd6982a8..fe2e0baa52 100644 --- a/typedapi/types/nodeinfosettingshttptype.go +++ b/typedapi/types/nodeinfosettingshttptype.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoSettingsHttpType type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L196-L199 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L199-L202 type NodeInfoSettingsHttpType struct { Default string `json:"default"` } diff --git a/typedapi/types/nodeinfosettingsingest.go b/typedapi/types/nodeinfosettingsingest.go index c43f3c7ff8..37eb363290 100644 --- a/typedapi/types/nodeinfosettingsingest.go +++ b/typedapi/types/nodeinfosettingsingest.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodeInfoSettingsIngest type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L87-L122 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L87-L122 type NodeInfoSettingsIngest struct { Append *NodeInfoIngestInfo `json:"append,omitempty"` Attachment *NodeInfoIngestInfo `json:"attachment,omitempty"` diff --git a/typedapi/types/nodeinfosettingsnetwork.go b/typedapi/types/nodeinfosettingsnetwork.go index 46bd7a9e7e..17dbd53111 100644 --- a/typedapi/types/nodeinfosettingsnetwork.go +++ b/typedapi/types/nodeinfosettingsnetwork.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,9 +30,9 @@ import ( // NodeInfoSettingsNetwork type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L220-L222 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L223-L225 type NodeInfoSettingsNetwork struct { - Host string `json:"host"` + Host *string `json:"host,omitempty"` } func (s *NodeInfoSettingsNetwork) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/nodeinfosettingsnode.go b/typedapi/types/nodeinfosettingsnode.go index ada8fa1178..c5f67369ce 100644 --- a/typedapi/types/nodeinfosettingsnode.go +++ b/typedapi/types/nodeinfosettingsnode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoSettingsNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L152-L156 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L152-L156 type NodeInfoSettingsNode struct { Attr map[string]json.RawMessage `json:"attr"` MaxLocalStorageNodes *string `json:"max_local_storage_nodes,omitempty"` diff --git a/typedapi/types/nodeinfosettingstransport.go b/typedapi/types/nodeinfosettingstransport.go index 740fcf71bd..ce258372ee 100644 --- a/typedapi/types/nodeinfosettingstransport.go +++ b/typedapi/types/nodeinfosettingstransport.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoSettingsTransport type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L205-L209 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L208-L212 type NodeInfoSettingsTransport struct { Features *NodeInfoSettingsTransportFeatures `json:"features,omitempty"` Type NodeInfoSettingsTransportType `json:"type"` diff --git a/typedapi/types/nodeinfosettingstransportfeatures.go b/typedapi/types/nodeinfosettingstransportfeatures.go index 67b40fd328..2b13c68a38 100644 --- a/typedapi/types/nodeinfosettingstransportfeatures.go +++ b/typedapi/types/nodeinfosettingstransportfeatures.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoSettingsTransportFeatures type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L216-L218 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L219-L221 type NodeInfoSettingsTransportFeatures struct { XPack string `json:"x-pack"` } diff --git a/typedapi/types/nodeinfosettingstransporttype.go b/typedapi/types/nodeinfosettingstransporttype.go index 94eca56062..129b211ee1 100644 --- a/typedapi/types/nodeinfosettingstransporttype.go +++ b/typedapi/types/nodeinfosettingstransporttype.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoSettingsTransportType type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L211-L214 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L214-L217 type NodeInfoSettingsTransportType struct { Default string `json:"default"` } diff --git a/typedapi/types/nodeinfotransport.go b/typedapi/types/nodeinfotransport.go index ddfc930108..c61d94fa46 100644 --- a/typedapi/types/nodeinfotransport.go +++ b/typedapi/types/nodeinfotransport.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoTransport type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L350-L354 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L353-L357 type NodeInfoTransport struct { BoundAddress []string `json:"bound_address"` Profiles map[string]string `json:"profiles"` diff --git a/typedapi/types/nodeinfoxpack.go b/typedapi/types/nodeinfoxpack.go index 9b40f99a30..8e74a5f987 100644 --- a/typedapi/types/nodeinfoxpack.go +++ b/typedapi/types/nodeinfoxpack.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // NodeInfoXpack type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L236-L240 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L239-L243 type NodeInfoXpack struct { License *NodeInfoXpackLicense `json:"license,omitempty"` Notification map[string]json.RawMessage `json:"notification,omitempty"` diff --git a/typedapi/types/nodeinfoxpacklicense.go b/typedapi/types/nodeinfoxpacklicense.go index 84fc717006..dd0b598938 100644 --- a/typedapi/types/nodeinfoxpacklicense.go +++ b/typedapi/types/nodeinfoxpacklicense.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodeInfoXpackLicense type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L273-L275 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L276-L278 type NodeInfoXpackLicense struct { SelfGenerated NodeInfoXpackLicenseType `json:"self_generated"` } diff --git a/typedapi/types/nodeinfoxpacklicensetype.go b/typedapi/types/nodeinfoxpacklicensetype.go index 73fa2f866b..11518b1ddd 100644 --- a/typedapi/types/nodeinfoxpacklicensetype.go +++ b/typedapi/types/nodeinfoxpacklicensetype.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoXpackLicenseType type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L277-L279 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L280-L282 type NodeInfoXpackLicenseType struct { Type string `json:"type"` } diff --git a/typedapi/types/nodeinfoxpacksecurity.go b/typedapi/types/nodeinfoxpacksecurity.go index 67b74bbcfe..c1627508f4 100644 --- a/typedapi/types/nodeinfoxpacksecurity.go +++ b/typedapi/types/nodeinfoxpacksecurity.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoXpackSecurity type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L242-L247 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L245-L250 type NodeInfoXpackSecurity struct { Authc *NodeInfoXpackSecurityAuthc `json:"authc,omitempty"` Enabled string `json:"enabled"` diff --git a/typedapi/types/nodeinfoxpacksecurityauthc.go b/typedapi/types/nodeinfoxpacksecurityauthc.go index cde5967a85..f29f5e7da3 100644 --- a/typedapi/types/nodeinfoxpacksecurityauthc.go +++ b/typedapi/types/nodeinfoxpacksecurityauthc.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodeInfoXpackSecurityAuthc type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L253-L256 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L256-L259 type NodeInfoXpackSecurityAuthc struct { Realms NodeInfoXpackSecurityAuthcRealms `json:"realms"` Token NodeInfoXpackSecurityAuthcToken `json:"token"` diff --git a/typedapi/types/nodeinfoxpacksecurityauthcrealms.go b/typedapi/types/nodeinfoxpacksecurityauthcrealms.go index efc784a066..1074fbd9a4 100644 --- a/typedapi/types/nodeinfoxpacksecurityauthcrealms.go +++ b/typedapi/types/nodeinfoxpacksecurityauthcrealms.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodeInfoXpackSecurityAuthcRealms type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L258-L262 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L261-L265 type NodeInfoXpackSecurityAuthcRealms struct { File map[string]NodeInfoXpackSecurityAuthcRealmsStatus `json:"file,omitempty"` Native map[string]NodeInfoXpackSecurityAuthcRealmsStatus `json:"native,omitempty"` diff --git a/typedapi/types/nodeinfoxpacksecurityauthcrealmsstatus.go b/typedapi/types/nodeinfoxpacksecurityauthcrealmsstatus.go index bfc64418d3..bad0ecd441 100644 --- a/typedapi/types/nodeinfoxpacksecurityauthcrealmsstatus.go +++ b/typedapi/types/nodeinfoxpacksecurityauthcrealmsstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoXpackSecurityAuthcRealmsStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L268-L271 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L271-L274 type NodeInfoXpackSecurityAuthcRealmsStatus struct { Enabled *string `json:"enabled,omitempty"` Order string `json:"order"` diff --git a/typedapi/types/nodeinfoxpacksecurityauthctoken.go b/typedapi/types/nodeinfoxpacksecurityauthctoken.go index 7c85a4a544..f8b37549ee 100644 --- a/typedapi/types/nodeinfoxpacksecurityauthctoken.go +++ b/typedapi/types/nodeinfoxpacksecurityauthctoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeInfoXpackSecurityAuthcToken type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L264-L266 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L267-L269 type NodeInfoXpackSecurityAuthcToken struct { Enabled string `json:"enabled"` } diff --git a/typedapi/types/nodeinfoxpacksecurityssl.go b/typedapi/types/nodeinfoxpacksecurityssl.go index e6d2cf00fd..a2879a6df3 100644 --- a/typedapi/types/nodeinfoxpacksecurityssl.go +++ b/typedapi/types/nodeinfoxpacksecurityssl.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodeInfoXpackSecuritySsl type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L249-L251 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L252-L254 type NodeInfoXpackSecuritySsl struct { Ssl map[string]string `json:"ssl"` } diff --git a/typedapi/types/nodejvminfo.go b/typedapi/types/nodejvminfo.go index 11046e7dad..3b4dac267a 100644 --- a/typedapi/types/nodejvminfo.go +++ b/typedapi/types/nodejvminfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeJvmInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L356-L370 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L359-L373 type NodeJvmInfo struct { GcCollectors []string `json:"gc_collectors"` InputArguments []string `json:"input_arguments"` @@ -84,7 +84,7 @@ func (s *NodeJvmInfo) UnmarshalJSON(data []byte) error { case "pid": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *NodeJvmInfo) UnmarshalJSON(data []byte) error { } case "using_bundled_jdk", "bundled_jdk": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodeoperatingsysteminfo.go b/typedapi/types/nodeoperatingsysteminfo.go index b4b7012a75..c77c117cc7 100644 --- a/typedapi/types/nodeoperatingsysteminfo.go +++ b/typedapi/types/nodeoperatingsysteminfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeOperatingSystemInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L372-L389 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L375-L392 type NodeOperatingSystemInfo struct { // AllocatedProcessors The number of processors actually used to calculate thread pool size. This // number can be set with the node.processors setting of a node and defaults to @@ -70,7 +70,7 @@ func (s *NodeOperatingSystemInfo) UnmarshalJSON(data []byte) error { case "allocated_processors": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -98,7 +98,7 @@ func (s *NodeOperatingSystemInfo) UnmarshalJSON(data []byte) error { case "available_processors": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodepackagingtype.go b/typedapi/types/nodepackagingtype.go index 3c5b8e4ece..946170ef74 100644 --- a/typedapi/types/nodepackagingtype.go +++ b/typedapi/types/nodepackagingtype.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodePackagingType type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L526-L539 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L526-L539 type NodePackagingType struct { // Count Number of selected nodes using the distribution flavor and file type. Count int `json:"count"` @@ -58,7 +58,7 @@ func (s *NodePackagingType) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodeprocessinfo.go b/typedapi/types/nodeprocessinfo.go index 0bb0fc5c24..33edc7e538 100644 --- a/typedapi/types/nodeprocessinfo.go +++ b/typedapi/types/nodeprocessinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeProcessInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L391-L398 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L394-L401 type NodeProcessInfo struct { // Id Process identifier (PID) Id int64 `json:"id"` @@ -57,7 +57,7 @@ func (s *NodeProcessInfo) UnmarshalJSON(data []byte) error { switch t { case "id": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *NodeProcessInfo) UnmarshalJSON(data []byte) error { } case "mlockall": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodereloaderror.go b/typedapi/types/nodereloaderror.go index 22e40c88ab..6cde6a3c50 100644 --- a/typedapi/types/nodereloaderror.go +++ b/typedapi/types/nodereloaderror.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // NodeReloadError type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/NodeReloadResult.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/NodeReloadResult.ts#L24-L27 type NodeReloadError struct { Name string `json:"name"` ReloadException *ErrorCause `json:"reload_exception,omitempty"` diff --git a/typedapi/types/nodereloadresult.go b/typedapi/types/nodereloadresult.go index a8e816b556..88cd003fd0 100644 --- a/typedapi/types/nodereloadresult.go +++ b/typedapi/types/nodereloadresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // Stats // NodeReloadError // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/NodeReloadResult.ts#L29-L30 -type NodeReloadResult interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/NodeReloadResult.ts#L29-L30 +type NodeReloadResult any diff --git a/typedapi/types/nodescontext.go b/typedapi/types/nodescontext.go index 857f446ea0..24de809ea4 100644 --- a/typedapi/types/nodescontext.go +++ b/typedapi/types/nodescontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodesContext type. 
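The UnmarshalJSON hunks in this stretch (NodeProcessInfo.Id, mlockall, the NodesContext counters, and many others) all swap the scratch variable from interface{} to any while keeping the same probe: decode into any, then accept either a quoted string or a JSON number. A minimal standalone sketch of that string-or-number pattern, not the generated code itself; decodeFlexibleInt64 is a name of my choosing:

package example

import (
    "encoding/json"
    "fmt"
    "strconv"
)

// decodeFlexibleInt64 mirrors the pattern used by the generated decoders:
// the value may arrive as a JSON number or as a quoted string, so both
// forms are accepted before converting to int64.
func decodeFlexibleInt64(raw json.RawMessage) (int64, error) {
    var tmp any
    if err := json.Unmarshal(raw, &tmp); err != nil {
        return 0, err
    }
    switch v := tmp.(type) {
    case string:
        return strconv.ParseInt(v, 10, 64)
    case float64:
        return int64(v), nil
    default:
        return 0, fmt.Errorf("unexpected JSON type %T", tmp)
    }
}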
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L997-L1002 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L997-L1002 type NodesContext struct { CacheEvictions *int64 `json:"cache_evictions,omitempty"` CompilationLimitTriggered *int64 `json:"compilation_limit_triggered,omitempty"` @@ -55,7 +55,7 @@ func (s *NodesContext) UnmarshalJSON(data []byte) error { switch t { case "cache_evictions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *NodesContext) UnmarshalJSON(data []byte) error { } case "compilation_limit_triggered": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *NodesContext) UnmarshalJSON(data []byte) error { } case "compilations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodescredentials.go b/typedapi/types/nodescredentials.go index a4ffc00ad9..8ef26f25a6 100644 --- a/typedapi/types/nodescredentials.go +++ b/typedapi/types/nodescredentials.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodesCredentials type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_service_credentials/types.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_service_credentials/types.ts#L23-L28 type NodesCredentials struct { // FileTokens File-backed tokens collected from all nodes FileTokens map[string]NodesCredentialsFileToken `json:"file_tokens"` diff --git a/typedapi/types/nodescredentialsfiletoken.go b/typedapi/types/nodescredentialsfiletoken.go index 72c37a2782..f94a2b1890 100644 --- a/typedapi/types/nodescredentialsfiletoken.go +++ b/typedapi/types/nodescredentialsfiletoken.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodesCredentialsFileToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_service_credentials/types.ts#L30-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_service_credentials/types.ts#L30-L32 type NodesCredentialsFileToken struct { Nodes []string `json:"nodes"` } diff --git a/typedapi/types/nodeshard.go b/typedapi/types/nodeshard.go index 12058ac2b6..1ed5e4852f 100644 --- a/typedapi/types/nodeshard.go +++ b/typedapi/types/nodeshard.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,14 +33,14 @@ import ( // NodeShard type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Node.ts#L60-L71 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Node.ts#L60-L71 type NodeShard struct { AllocationId map[string]string `json:"allocation_id,omitempty"` Index string `json:"index"` Node *string `json:"node,omitempty"` Primary bool `json:"primary"` RecoverySource map[string]string `json:"recovery_source,omitempty"` - RelocatingNode string `json:"relocating_node,omitempty"` + RelocatingNode *string `json:"relocating_node,omitempty"` RelocationFailureInfo *RelocationFailureInfo `json:"relocation_failure_info,omitempty"` Shard int `json:"shard"` State shardroutingstate.ShardRoutingState `json:"state"` @@ -81,7 +81,7 @@ func (s *NodeShard) UnmarshalJSON(data []byte) error { } case "primary": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *NodeShard) UnmarshalJSON(data []byte) error { case "shard": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodeshutdownstatus.go b/typedapi/types/nodeshutdownstatus.go index 524d540497..ec5073c1b1 100644 --- a/typedapi/types/nodeshutdownstatus.go +++ b/typedapi/types/nodeshutdownstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // NodeShutdownStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L29-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L29-L38 type NodeShutdownStatus struct { NodeId string `json:"node_id"` PersistentTasks PersistentTaskStatus `json:"persistent_tasks"` diff --git a/typedapi/types/nodesindexingpressure.go b/typedapi/types/nodesindexingpressure.go index c0a20c4d96..1e05682421 100644 --- a/typedapi/types/nodesindexingpressure.go +++ b/typedapi/types/nodesindexingpressure.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodesIndexingPressure type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L116-L121 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L116-L121 type NodesIndexingPressure struct { // Memory Contains statistics for memory consumption from indexing load. 
Memory *NodesIndexingPressureMemory `json:"memory,omitempty"` diff --git a/typedapi/types/nodesindexingpressurememory.go b/typedapi/types/nodesindexingpressurememory.go index 3a4ebc042a..f276843bdf 100644 --- a/typedapi/types/nodesindexingpressurememory.go +++ b/typedapi/types/nodesindexingpressurememory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodesIndexingPressureMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L123-L142 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L123-L142 type NodesIndexingPressureMemory struct { // Current Contains statistics for current indexing load. Current *PressureMemory `json:"current,omitempty"` @@ -71,7 +71,7 @@ func (s *NodesIndexingPressureMemory) UnmarshalJSON(data []byte) error { } case "limit_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodesingest.go b/typedapi/types/nodesingest.go index 8eceabded2..83602a339c 100644 --- a/typedapi/types/nodesingest.go +++ b/typedapi/types/nodesingest.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // NodesIngest type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L345-L354 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L345-L354 type NodesIngest struct { // Pipelines Contains statistics about ingest pipelines for the node. Pipelines map[string]IngestTotal `json:"pipelines,omitempty"` diff --git a/typedapi/types/nodesrecord.go b/typedapi/types/nodesrecord.go index e6274f8bbd..0c5db1d37c 100644 --- a/typedapi/types/nodesrecord.go +++ b/typedapi/types/nodesrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodesRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/nodes/types.ts#L23-L542 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/nodes/types.ts#L23-L542 type NodesRecord struct { // Build The Elasticsearch build hash. 
Build *string `json:"build,omitempty"` diff --git a/typedapi/types/nodestatistics.go b/typedapi/types/nodestatistics.go index 287a8d53aa..94ada03b5a 100644 --- a/typedapi/types/nodestatistics.go +++ b/typedapi/types/nodestatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Node.ts#L28-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Node.ts#L28-L39 type NodeStatistics struct { // Failed Number of nodes that rejected the request or failed to respond. If this value // is not 0, a reason for the rejection or failure is included in the response. @@ -60,7 +60,7 @@ func (s *NodeStatistics) UnmarshalJSON(data []byte) error { case "failed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -81,7 +81,7 @@ func (s *NodeStatistics) UnmarshalJSON(data []byte) error { case "successful": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -97,7 +97,7 @@ func (s *NodeStatistics) UnmarshalJSON(data []byte) error { case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodetasks.go b/typedapi/types/nodetasks.go index 052d2f5325..48c9e75794 100644 --- a/typedapi/types/nodetasks.go +++ b/typedapi/types/nodetasks.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // NodeTasks type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/tasks/_types/TaskListResponseBase.ts#L49-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/tasks/_types/TaskListResponseBase.ts#L49-L57 type NodeTasks struct { Attributes map[string]string `json:"attributes,omitempty"` Host *string `json:"host,omitempty"` diff --git a/typedapi/types/nodethreadpoolinfo.go b/typedapi/types/nodethreadpoolinfo.go index aa8d67a6a1..62f8487ed3 100644 --- a/typedapi/types/nodethreadpoolinfo.go +++ b/typedapi/types/nodethreadpoolinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // NodeThreadPoolInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/info/types.ts#L294-L301 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/info/types.ts#L297-L304 type NodeThreadPoolInfo struct { Core *int `json:"core,omitempty"` KeepAlive Duration `json:"keep_alive,omitempty"` @@ -58,7 +58,7 @@ func (s *NodeThreadPoolInfo) UnmarshalJSON(data []byte) error { case "core": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -79,7 +79,7 @@ func (s *NodeThreadPoolInfo) UnmarshalJSON(data []byte) error { case "max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *NodeThreadPoolInfo) UnmarshalJSON(data []byte) error { case "queue_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,7 +111,7 @@ func (s *NodeThreadPoolInfo) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/nodeusage.go b/typedapi/types/nodeusage.go index 622f3fd43a..f5a23563bb 100644 --- a/typedapi/types/nodeusage.go +++ b/typedapi/types/nodeusage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // NodeUsage type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/usage/types.ts#L25-L30 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/usage/types.ts#L25-L30 type NodeUsage struct { Aggregations map[string]json.RawMessage `json:"aggregations"` RestActions map[string]int `json:"rest_actions"` diff --git a/typedapi/types/norianalyzer.go b/typedapi/types/norianalyzer.go index e591a3185f..cb2f4247ca 100644 --- a/typedapi/types/norianalyzer.go +++ b/typedapi/types/norianalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // NoriAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L66-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L66-L72 type NoriAnalyzer struct { DecompoundMode *noridecompoundmode.NoriDecompoundMode `json:"decompound_mode,omitempty"` Stoptags []string `json:"stoptags,omitempty"` diff --git a/typedapi/types/noripartofspeechtokenfilter.go b/typedapi/types/noripartofspeechtokenfilter.go index 117edadcb3..2651ed2eb4 100644 --- a/typedapi/types/noripartofspeechtokenfilter.go +++ b/typedapi/types/noripartofspeechtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // NoriPartOfSpeechTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L273-L276 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L275-L278 type NoriPartOfSpeechTokenFilter struct { Stoptags []string `json:"stoptags,omitempty"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/noritokenizer.go b/typedapi/types/noritokenizer.go index 63ab661d4d..9d1872318a 100644 --- a/typedapi/types/noritokenizer.go +++ b/typedapi/types/noritokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // NoriTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L81-L87 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L81-L87 type NoriTokenizer struct { DecompoundMode *noridecompoundmode.NoriDecompoundMode `json:"decompound_mode,omitempty"` DiscardPunctuation *bool `json:"discard_punctuation,omitempty"` @@ -64,7 +64,7 @@ func (s *NoriTokenizer) UnmarshalJSON(data []byte) error { } case "discard_punctuation": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/normalizeaggregation.go b/typedapi/types/normalizeaggregation.go index 4f8a37dbb4..b4aa47eb36 100644 --- a/typedapi/types/normalizeaggregation.go +++ b/typedapi/types/normalizeaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // NormalizeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L319-L324 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L319-L324 type NormalizeAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -44,10 +44,8 @@ type NormalizeAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` // Method The specific method to apply. 
Method *normalizemethod.NormalizeMethod `json:"method,omitempty"` - Name *string `json:"name,omitempty"` } func (s *NormalizeAggregation) UnmarshalJSON(data []byte) error { @@ -87,28 +85,11 @@ func (s *NormalizeAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "method": if err := dec.Decode(&s.Method); err != nil { return fmt.Errorf("%s | %w", "Method", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/normalizer.go b/typedapi/types/normalizer.go index be609d14c4..06e76b8e58 100644 --- a/typedapi/types/normalizer.go +++ b/typedapi/types/normalizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // LowercaseNormalizer // CustomNormalizer // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/normalizers.ts#L20-L24 -type Normalizer interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/normalizers.ts#L20-L24 +type Normalizer any diff --git a/typedapi/types/nullvalue.go b/typedapi/types/nullvalue.go new file mode 100755 index 0000000000..17bfbd9357 --- /dev/null +++ b/typedapi/types/nullvalue.go @@ -0,0 +1,29 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package types + +// NullValue is a custom type used to represent the concept of a null or missing value. +// It can be used as a placeholder for variables or fields that are not initialized, +// or to indicate that a specific piece of data is intentionally absent. +type NullValue struct{} + +// MarshalJSON converts the NullValue to JSON format. +// It always returns a "null" value as per JSON standard for null values. +func (n NullValue) MarshalJSON() ([]byte, error) { + return []byte("null"), nil +} diff --git a/typedapi/types/numberrangequery.go b/typedapi/types/numberrangequery.go index 8814875bb2..092036eff7 100644 --- a/typedapi/types/numberrangequery.go +++ b/typedapi/types/numberrangequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
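The new typedapi/types/nullvalue.go above is the only new file in this stretch of the diff: a zero-size marker type whose MarshalJSON always emits the JSON literal null, presumably so a caller can send an explicit null rather than omitting a field. A minimal sketch of what that looks like from the caller's side, assuming the v8 module import path:

package example

import (
    "encoding/json"
    "fmt"

    "github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// PrintNullValue shows that NullValue serializes to the literal null.
func PrintNullValue() {
    b, err := json.Marshal(types.NullValue{})
    if err != nil {
        panic(err)
    }
    fmt.Println(string(b)) // prints: null
}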
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // NumberRangeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L145-L164 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L157-L157 type NumberRangeQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -41,7 +41,7 @@ type NumberRangeQuery struct { // A boost value between 0 and 1.0 decreases the relevance score. // A value greater than 1.0 increases the relevance score. Boost *float32 `json:"boost,omitempty"` - From Float64 `json:"from,omitempty"` + From *Float64 `json:"from,omitempty"` // Gt Greater than. Gt *Float64 `json:"gt,omitempty"` // Gte Greater than or equal to. @@ -53,7 +53,7 @@ type NumberRangeQuery struct { QueryName_ *string `json:"_name,omitempty"` // Relation Indicates how the range query matches values for `range` fields. Relation *rangerelation.RangeRelation `json:"relation,omitempty"` - To Float64 `json:"to,omitempty"` + To *Float64 `json:"to,omitempty"` } func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { @@ -72,7 +72,7 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -93,7 +93,7 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { } case "gt": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -109,7 +109,7 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { } case "gte": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { } case "lt": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -141,7 +141,7 @@ func (s *NumberRangeQuery) UnmarshalJSON(data []byte) error { } case "lte": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/numericdecayfunction.go b/typedapi/types/numericdecayfunction.go index e5fde64dd5..e5d68a82be 100644 --- a/typedapi/types/numericdecayfunction.go +++ b/typedapi/types/numericdecayfunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -29,19 +29,19 @@ import ( // NumericDecayFunction type. 
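With NumberRangeQuery's From and To now pointers, an unset bound is distinguishable from an explicit zero. A sketch of populating the struct directly after this change; the local pointer helper is illustrative and not part of the generated API:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// ptrF64 is a local convenience helper, not a generated function.
func ptrF64(v types.Float64) *types.Float64 { return &v }

func main() {
	q := types.NumberRangeQuery{
		// From and To are *Float64 after this change: leaving them nil omits
		// them from the serialized query instead of sending 0.
		From: ptrF64(10),
		To:   ptrF64(20),
	}
	out, _ := json.Marshal(q)
	fmt.Println(string(out)) // only the bounds that were set are serialized
}
```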
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L182-L184 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L192-L192 type NumericDecayFunction struct { + DecayFunctionBasedoubledouble map[string]DecayPlacementdoubledouble `json:"-"` // MultiValueMode Determines how the distance is calculated when a field used for computing the // decay contains multiple values. - MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` - NumericDecayFunction map[string]DecayPlacementdoubledouble `json:"-"` + MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` } // MarhsalJSON overrides marshalling for types with additional properties func (s NumericDecayFunction) MarshalJSON() ([]byte, error) { type opt NumericDecayFunction // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { @@ -53,10 +53,10 @@ func (s NumericDecayFunction) MarshalJSON() ([]byte, error) { } // We inline the additional fields from the underlying map - for key, value := range s.NumericDecayFunction { + for key, value := range s.DecayFunctionBasedoubledouble { tmp[fmt.Sprintf("%s", key)] = value } - delete(tmp, "NumericDecayFunction") + delete(tmp, "DecayFunctionBasedoubledouble") data, err = json.Marshal(tmp) if err != nil { @@ -69,7 +69,7 @@ func (s NumericDecayFunction) MarshalJSON() ([]byte, error) { // NewNumericDecayFunction returns a NumericDecayFunction. func NewNumericDecayFunction() *NumericDecayFunction { r := &NumericDecayFunction{ - NumericDecayFunction: make(map[string]DecayPlacementdoubledouble, 0), + DecayFunctionBasedoubledouble: make(map[string]DecayPlacementdoubledouble, 0), } return r diff --git a/typedapi/types/numericfielddata.go b/typedapi/types/numericfielddata.go index c4467994a3..66bfbddb55 100644 --- a/typedapi/types/numericfielddata.go +++ b/typedapi/types/numericfielddata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // NumericFielddata type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/NumericFielddata.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/NumericFielddata.ts#L22-L24 type NumericFielddata struct { Format numericfielddataformat.NumericFielddataFormat `json:"format"` } diff --git a/typedapi/types/objectproperty.go b/typedapi/types/objectproperty.go index 3eb23f458c..fed9e4b9c6 100644 --- a/typedapi/types/objectproperty.go +++ b/typedapi/types/objectproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
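The renamed DecayFunctionBasedoubledouble map above carries the per-field decay placement, and MarshalJSON inlines its entries into the top-level object next to multi_value_mode. A hedged sketch of what that flattening looks like; the field name and placement values are placeholders:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	fn := types.NewNumericDecayFunction()
	// Entries of the embedded map are flattened into the object itself, so the
	// document field name ("price" here) becomes a top-level JSON key.
	fn.DecayFunctionBasedoubledouble["price"] = types.DecayPlacementdoubledouble{}
	out, _ := json.Marshal(fn)
	fmt.Println(string(out)) // e.g. {"price":{}}
}
```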
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // ObjectProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/complex.ts#L46-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/complex.ts#L46-L50 type ObjectProperty struct { CopyTo []string `json:"copy_to,omitempty"` Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` @@ -86,7 +86,7 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,7 +106,7 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -127,7 +127,7 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -217,12 +217,6 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -241,6 +235,18 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -397,6 +403,12 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -408,7 +420,7 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -437,7 +449,7 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -458,7 +470,7 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -548,12 +560,6 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { return err } 
s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -572,6 +578,18 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -728,6 +746,12 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -750,7 +774,7 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -764,7 +788,7 @@ func (s *ObjectProperty) UnmarshalJSON(data []byte) error { } case "subobjects": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/onehotencodingpreprocessor.go b/typedapi/types/onehotencodingpreprocessor.go index e3069f00f8..20429e0e29 100644 --- a/typedapi/types/onehotencodingpreprocessor.go +++ b/typedapi/types/onehotencodingpreprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // OneHotEncodingPreprocessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/types.ts#L44-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/types.ts#L44-L47 type OneHotEncodingPreprocessor struct { Field string `json:"field"` HotMap map[string]string `json:"hot_map"` diff --git a/typedapi/types/operatingsystem.go b/typedapi/types/operatingsystem.go index ca7eda81f0..10cf793d68 100644 --- a/typedapi/types/operatingsystem.go +++ b/typedapi/types/operatingsystem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // OperatingSystem type. 
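The extra cases added to the ObjectProperty dispatch above teach it about semantic_text, the relocated sparse_vector, and icu_collation_keyword sub-properties. A sketch of decoding a mapping fragment and recovering the concrete property type; the JSON payload is an illustrative assumption:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	payload := []byte(`{
		"properties": {
			"body_semantic": { "type": "semantic_text" }
		}
	}`)

	var obj types.ObjectProperty
	if err := json.Unmarshal(payload, &obj); err != nil {
		panic(err)
	}
	// The switch on the "type" discriminator now yields a *SemanticTextProperty
	// instead of falling through to the generic default branch.
	if p, ok := obj.Properties["body_semantic"].(*types.SemanticTextProperty); ok {
		fmt.Printf("decoded semantic_text property: %+v\n", p)
	}
}
```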
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L945-L951 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L945-L951 type OperatingSystem struct { Cgroup *Cgroup `json:"cgroup,omitempty"` Cpu *Cpu `json:"cpu,omitempty"` @@ -76,7 +76,7 @@ func (s *OperatingSystem) UnmarshalJSON(data []byte) error { } case "timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/operatingsystemmemoryinfo.go b/typedapi/types/operatingsystemmemoryinfo.go index 57c4de6e89..88ba066c11 100644 --- a/typedapi/types/operatingsystemmemoryinfo.go +++ b/typedapi/types/operatingsystemmemoryinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // OperatingSystemMemoryInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/stats/types.ts#L541-L568 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/stats/types.ts#L541-L568 type OperatingSystemMemoryInfo struct { // AdjustedTotalInBytes Total amount, in bytes, of memory across all selected nodes, but using the // value specified using the `es.total_memory_bytes` system property instead of @@ -65,7 +65,7 @@ func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { switch t { case "adjusted_total_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { } case "free_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { case "free_percent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,7 +111,7 @@ func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { } case "total_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -126,7 +126,7 @@ func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { } case "used_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -142,7 +142,7 @@ func (s *OperatingSystemMemoryInfo) UnmarshalJSON(data []byte) error { case "used_percent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/operationcontainer.go b/typedapi/types/operationcontainer.go index 7f8b051e06..d698b3e3e4 100644 --- a/typedapi/types/operationcontainer.go +++ b/typedapi/types/operationcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
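Many of the regenerated unmarshalers above (the timestamp and the various *_in_bytes counters, for instance) use the same `var tmp any` pattern to accept a value that arrives either as a JSON number or as a quoted string. A simplified, standalone sketch of that pattern using a hypothetical struct rather than a generated one:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strconv"
)

// memStats mirrors the generated pattern in spirit: decode into `any` first,
// then accept either a quoted string or a JSON number for the same field.
type memStats struct {
	FreeInBytes int64 `json:"free_in_bytes"`
}

func (m *memStats) UnmarshalJSON(data []byte) error {
	var raw map[string]any
	if err := json.Unmarshal(data, &raw); err != nil {
		return err
	}
	switch v := raw["free_in_bytes"].(type) {
	case string:
		n, err := strconv.ParseInt(v, 10, 64)
		if err != nil {
			return fmt.Errorf("FreeInBytes | %w", err)
		}
		m.FreeInBytes = n
	case float64:
		m.FreeInBytes = int64(v)
	}
	return nil
}

func main() {
	var a, b memStats
	_ = json.Unmarshal([]byte(`{"free_in_bytes": 1024}`), &a)
	_ = json.Unmarshal([]byte(`{"free_in_bytes": "2048"}`), &b)
	fmt.Println(a.FreeInBytes, b.FreeInBytes) // 1024 2048
}
```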
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // OperationContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/bulk/types.ts#L145-L167 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/bulk/types.ts#L145-L167 type OperationContainer struct { // Create Indexes the specified document if it does not already exist. // The following line must contain the source data to be indexed. diff --git a/typedapi/types/outlierdetectionparameters.go b/typedapi/types/outlierdetectionparameters.go index e1a7e972c8..0f8b8317af 100644 --- a/typedapi/types/outlierdetectionparameters.go +++ b/typedapi/types/outlierdetectionparameters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // OutlierDetectionParameters type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L527-L561 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L527-L561 type OutlierDetectionParameters struct { // ComputeFeatureInfluence Specifies whether the feature influence calculation is enabled. ComputeFeatureInfluence *bool `json:"compute_feature_influence,omitempty"` @@ -80,7 +80,7 @@ func (s *OutlierDetectionParameters) UnmarshalJSON(data []byte) error { switch t { case "compute_feature_influence": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -94,7 +94,7 @@ func (s *OutlierDetectionParameters) UnmarshalJSON(data []byte) error { } case "feature_influence_threshold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -123,7 +123,7 @@ func (s *OutlierDetectionParameters) UnmarshalJSON(data []byte) error { case "n_neighbors": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *OutlierDetectionParameters) UnmarshalJSON(data []byte) error { } case "outlier_fraction": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -154,7 +154,7 @@ func (s *OutlierDetectionParameters) UnmarshalJSON(data []byte) error { } case "standardization_enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/overallbucket.go b/typedapi/types/overallbucket.go index c8439503d5..ca1bcabc9a 100644 --- a/typedapi/types/overallbucket.go +++ b/typedapi/types/overallbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // OverallBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Bucket.ts#L130-L145 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Bucket.ts#L130-L145 type OverallBucket struct { // BucketSpan The length of the bucket in seconds. Matches the job with the longest // bucket_span value. @@ -72,7 +72,7 @@ func (s *OverallBucket) UnmarshalJSON(data []byte) error { } case "is_interim": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *OverallBucket) UnmarshalJSON(data []byte) error { } case "overall_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/overallbucketjob.go b/typedapi/types/overallbucketjob.go index b78cdd242d..a4750ea05e 100644 --- a/typedapi/types/overallbucketjob.go +++ b/typedapi/types/overallbucketjob.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // OverallBucketJob type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Bucket.ts#L146-L149 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Bucket.ts#L146-L149 type OverallBucketJob struct { JobId string `json:"job_id"` MaxAnomalyScore Float64 `json:"max_anomaly_score"` @@ -58,7 +58,7 @@ func (s *OverallBucketJob) UnmarshalJSON(data []byte) error { } case "max_anomaly_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/overlapping.go b/typedapi/types/overlapping.go index e37700258a..0b1d53a03a 100644 --- a/typedapi/types/overlapping.go +++ b/typedapi/types/overlapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Overlapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L39-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L39-L42 type Overlapping struct { IndexPatterns []string `json:"index_patterns"` Name string `json:"name"` diff --git a/typedapi/types/page.go b/typedapi/types/page.go index f1e629ff51..eaf47e66f1 100644 --- a/typedapi/types/page.go +++ b/typedapi/types/page.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Page type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Page.ts#L22-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Page.ts#L22-L33 type Page struct { // From Skips the specified number of items. From *int `json:"from,omitempty"` @@ -56,7 +56,7 @@ func (s *Page) UnmarshalJSON(data []byte) error { case "from": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *Page) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/pagerdutyaction.go b/typedapi/types/pagerdutyaction.go index 019ec377a8..e45017d9ca 100644 --- a/typedapi/types/pagerdutyaction.go +++ b/typedapi/types/pagerdutyaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // PagerDutyAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L54-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L54-L54 type PagerDutyAction struct { Account *string `json:"account,omitempty"` AttachPayload bool `json:"attach_payload"` @@ -74,7 +74,7 @@ func (s *PagerDutyAction) UnmarshalJSON(data []byte) error { s.Account = &o case "attach_payload": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/pagerdutycontext.go b/typedapi/types/pagerdutycontext.go index fcf2785111..382ffc1802 100644 --- a/typedapi/types/pagerdutycontext.go +++ b/typedapi/types/pagerdutycontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // PagerDutyContext type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L61-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L61-L65 type PagerDutyContext struct { Href *string `json:"href,omitempty"` Src *string `json:"src,omitempty"` diff --git a/typedapi/types/pagerdutyevent.go b/typedapi/types/pagerdutyevent.go index 3fe2ab7cb3..c2c27307d5 100644 --- a/typedapi/types/pagerdutyevent.go +++ b/typedapi/types/pagerdutyevent.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
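Page, regenerated above, is the shared from/size pagination object used by several ML APIs. A small sketch of building one; the local pointer helper is illustrative, and the size field name is assumed from the spec rather than shown in this hunk:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// intPtr is a local convenience helper, not a generated function.
func intPtr(v int) *int { return &v }

func main() {
	// Skip the first 20 items and return at most 10.
	page := types.Page{
		From: intPtr(20),
		Size: intPtr(10),
	}
	out, _ := json.Marshal(page)
	fmt.Println(string(out)) // e.g. {"from":20,"size":10}
}
```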
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // PagerDutyEvent type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L40-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L40-L52 type PagerDutyEvent struct { Account *string `json:"account,omitempty"` AttachPayload bool `json:"attach_payload"` @@ -74,7 +74,7 @@ func (s *PagerDutyEvent) UnmarshalJSON(data []byte) error { s.Account = &o case "attach_payload": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/pagerdutyeventproxy.go b/typedapi/types/pagerdutyeventproxy.go index a7545ec0ac..9331dd3677 100644 --- a/typedapi/types/pagerdutyeventproxy.go +++ b/typedapi/types/pagerdutyeventproxy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PagerDutyEventProxy type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L56-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L56-L59 type PagerDutyEventProxy struct { Host *string `json:"host,omitempty"` Port *int `json:"port,omitempty"` @@ -59,7 +59,7 @@ func (s *PagerDutyEventProxy) UnmarshalJSON(data []byte) error { case "port": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/pagerdutyresult.go b/typedapi/types/pagerdutyresult.go index 11be7f7134..0fcb886216 100644 --- a/typedapi/types/pagerdutyresult.go +++ b/typedapi/types/pagerdutyresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PagerDutyResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L78-L83 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L78-L83 type PagerDutyResult struct { Event PagerDutyEvent `json:"event"` Reason *string `json:"reason,omitempty"` diff --git a/typedapi/types/painlesscontextsetup.go b/typedapi/types/painlesscontextsetup.go index a000850691..39add9c106 100644 --- a/typedapi/types/painlesscontextsetup.go +++ b/typedapi/types/painlesscontextsetup.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // PainlessContextSetup type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/scripts_painless_execute/types.ts#L25-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/scripts_painless_execute/types.ts#L25-L39 type PainlessContextSetup struct { // Document Document that’s temporarily indexed in-memory and accessible from the script. Document json.RawMessage `json:"document,omitempty"` diff --git a/typedapi/types/parentaggregate.go b/typedapi/types/parentaggregate.go index 225bdf1b61..eee2f755ad 100644 --- a/typedapi/types/parentaggregate.go +++ b/typedapi/types/parentaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // ParentAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L779-L780 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L786-L787 type ParentAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *ParentAggregate) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *ParentAggregate) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *ParentAggregate) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *ParentAggregate) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *ParentAggregate) UnmarshalJSON(data []byte) error { func (s ParentAggregate) MarshalJSON() ([]byte, error) { type opt ParentAggregate // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/parentaggregation.go b/typedapi/types/parentaggregation.go index cee737b69a..fb91956076 100644 --- a/typedapi/types/parentaggregation.go +++ b/typedapi/types/parentaggregation.go @@ -16,7 +16,7 @@ // under the License. 
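PainlessContextSetup.Document, touched above, holds the document that is temporarily indexed in memory so the script can read it; it is never persisted. A minimal sketch of filling it in, with a placeholder document:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// The document is supplied as raw JSON and only exists for the duration
	// of the script execution.
	setup := types.PainlessContextSetup{
		Document: json.RawMessage(`{"rank": 4, "field": "text"}`),
	}
	fmt.Printf("%s\n", setup.Document)
}
```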
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,15 +26,12 @@ import ( "errors" "fmt" "io" - "strconv" ) // ParentAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L643-L648 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L645-L650 type ParentAggregation struct { - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // Type The child type that should be selected. Type *string `json:"type,omitempty"` } @@ -54,23 +51,6 @@ func (s *ParentAggregation) UnmarshalJSON(data []byte) error { switch t { - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "type": if err := dec.Decode(&s.Type); err != nil { return fmt.Errorf("%s | %w", "Type", err) diff --git a/typedapi/types/parentidquery.go b/typedapi/types/parentidquery.go index 7d8ff49bce..6396870761 100644 --- a/typedapi/types/parentidquery.go +++ b/typedapi/types/parentidquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ParentIdQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/joining.ts#L132-L146 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/joining.ts#L132-L146 type ParentIdQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -65,7 +65,7 @@ func (s *ParentIdQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -86,7 +86,7 @@ func (s *ParentIdQuery) UnmarshalJSON(data []byte) error { } case "ignore_unmapped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/parenttaskinfo.go b/typedapi/types/parenttaskinfo.go index 372e817b3a..fd7a09a24f 100644 --- a/typedapi/types/parenttaskinfo.go +++ b/typedapi/types/parenttaskinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ParentTaskInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/tasks/_types/TaskListResponseBase.ts#L45-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/tasks/_types/TaskListResponseBase.ts#L45-L47 type ParentTaskInfo struct { Action string `json:"action"` Cancellable bool `json:"cancellable"` @@ -78,7 +78,7 @@ func (s *ParentTaskInfo) UnmarshalJSON(data []byte) error { s.Action = o case "cancellable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *ParentTaskInfo) UnmarshalJSON(data []byte) error { } case "cancelled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -131,7 +131,7 @@ func (s *ParentTaskInfo) UnmarshalJSON(data []byte) error { } case "id": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/passthroughinferenceoptions.go b/typedapi/types/passthroughinferenceoptions.go index b7f189b349..fb13a41250 100644 --- a/typedapi/types/passthroughinferenceoptions.go +++ b/typedapi/types/passthroughinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PassThroughInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L224-L231 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L224-L231 type PassThroughInferenceOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. diff --git a/typedapi/types/passthroughinferenceupdateoptions.go b/typedapi/types/passthroughinferenceupdateoptions.go index 56253db7d6..da3246b1a5 100644 --- a/typedapi/types/passthroughinferenceupdateoptions.go +++ b/typedapi/types/passthroughinferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PassThroughInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L385-L390 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L385-L390 type PassThroughInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. diff --git a/typedapi/types/pathhierarchytokenizer.go b/typedapi/types/pathhierarchytokenizer.go index 5cd0708d42..d13eb9a930 100644 --- a/typedapi/types/pathhierarchytokenizer.go +++ b/typedapi/types/pathhierarchytokenizer.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PathHierarchyTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L89-L96 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L89-L96 type PathHierarchyTokenizer struct { BufferSize Stringifiedinteger `json:"buffer_size,omitempty"` Delimiter *string `json:"delimiter,omitempty"` diff --git a/typedapi/types/patternanalyzer.go b/typedapi/types/patternanalyzer.go index 85cd82e2f3..d089f10aa3 100644 --- a/typedapi/types/patternanalyzer.go +++ b/typedapi/types/patternanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PatternAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L74-L81 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L74-L81 type PatternAnalyzer struct { Flags *string `json:"flags,omitempty"` Lowercase *bool `json:"lowercase,omitempty"` @@ -69,7 +69,7 @@ func (s *PatternAnalyzer) UnmarshalJSON(data []byte) error { s.Flags = &o case "lowercase": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/patterncapturetokenfilter.go b/typedapi/types/patterncapturetokenfilter.go index b979737ac8..683c1a0a29 100644 --- a/typedapi/types/patterncapturetokenfilter.go +++ b/typedapi/types/patterncapturetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // PatternCaptureTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L278-L282 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L280-L284 type PatternCaptureTokenFilter struct { Patterns []string `json:"patterns"` PreserveOriginal Stringifiedboolean `json:"preserve_original,omitempty"` diff --git a/typedapi/types/patternreplacecharfilter.go b/typedapi/types/patternreplacecharfilter.go index e80d6f6b56..e4d1b470f4 100644 --- a/typedapi/types/patternreplacecharfilter.go +++ b/typedapi/types/patternreplacecharfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PatternReplaceCharFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/char_filters.ts#L53-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/char_filters.ts#L54-L59 type PatternReplaceCharFilter struct { Flags *string `json:"flags,omitempty"` Pattern string `json:"pattern"` diff --git a/typedapi/types/patternreplacetokenfilter.go b/typedapi/types/patternreplacetokenfilter.go index c5d5c7b454..6e0409d64b 100644 --- a/typedapi/types/patternreplacetokenfilter.go +++ b/typedapi/types/patternreplacetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PatternReplaceTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L284-L290 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L286-L292 type PatternReplaceTokenFilter struct { All *bool `json:"all,omitempty"` Flags *string `json:"flags,omitempty"` @@ -57,7 +57,7 @@ func (s *PatternReplaceTokenFilter) UnmarshalJSON(data []byte) error { switch t { case "all": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/patterntokenizer.go b/typedapi/types/patterntokenizer.go index 88e63b242a..ccfbd96c96 100644 --- a/typedapi/types/patterntokenizer.go +++ b/typedapi/types/patterntokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PatternTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L98-L103 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L98-L103 type PatternTokenizer struct { Flags *string `json:"flags,omitempty"` Group *int `json:"group,omitempty"` @@ -69,7 +69,7 @@ func (s *PatternTokenizer) UnmarshalJSON(data []byte) error { case "group": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/pendingtask.go b/typedapi/types/pendingtask.go index 5d9ca1588e..a01915d261 100644 --- a/typedapi/types/pendingtask.go +++ b/typedapi/types/pendingtask.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PendingTask type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/pending_tasks/types.ts#L23-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/pending_tasks/types.ts#L23-L47 type PendingTask struct { // Executing Indicates whether the pending tasks are currently executing or not. Executing bool `json:"executing"` @@ -68,7 +68,7 @@ func (s *PendingTask) UnmarshalJSON(data []byte) error { switch t { case "executing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *PendingTask) UnmarshalJSON(data []byte) error { case "insert_order": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/pendingtasksrecord.go b/typedapi/types/pendingtasksrecord.go index e793e7a469..5da4dfe9d3 100644 --- a/typedapi/types/pendingtasksrecord.go +++ b/typedapi/types/pendingtasksrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PendingTasksRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/pending_tasks/types.ts#L20-L41 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/pending_tasks/types.ts#L20-L41 type PendingTasksRecord struct { // InsertOrder The task insertion order. InsertOrder *string `json:"insertOrder,omitempty"` diff --git a/typedapi/types/percentage.go b/typedapi/types/percentage.go index 9e59b60f66..9ebb0e5351 100644 --- a/typedapi/types/percentage.go +++ b/typedapi/types/percentage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // float32 // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Numeric.ts#L28-L28 -type Percentage interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Numeric.ts#L28-L28 +type Percentage any diff --git a/typedapi/types/percentagescoreheuristic.go b/typedapi/types/percentagescoreheuristic.go index 946e067937..a2710f9f61 100644 --- a/typedapi/types/percentagescoreheuristic.go +++ b/typedapi/types/percentagescoreheuristic.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // PercentageScoreHeuristic type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L764-L764 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L766-L766 type PercentageScoreHeuristic struct { } diff --git a/typedapi/types/percentileranksaggregation.go b/typedapi/types/percentileranksaggregation.go index 53a69fdce5..7477dada16 100644 --- a/typedapi/types/percentileranksaggregation.go +++ b/typedapi/types/percentileranksaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PercentileRanksAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L174-L193 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L174-L193 type PercentileRanksAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -51,7 +51,7 @@ type PercentileRanksAggregation struct { // percentile ranks. Tdigest *TDigest `json:"tdigest,omitempty"` // Values An array of values for which to calculate the percentile ranks. - Values []Float64 `json:"values,omitempty"` + Values *[]Float64 `json:"values,omitempty"` } func (s *PercentileRanksAggregation) UnmarshalJSON(data []byte) error { @@ -92,7 +92,7 @@ func (s *PercentileRanksAggregation) UnmarshalJSON(data []byte) error { } case "keyed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/percentiles.go b/typedapi/types/percentiles.go index 3b3c027368..2f2f654a2b 100644 --- a/typedapi/types/percentiles.go +++ b/typedapi/types/percentiles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // KeyedPercentiles // []ArrayPercentilesItem // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L150-L151 -type Percentiles interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L150-L151 +type Percentiles any diff --git a/typedapi/types/percentilesaggregation.go b/typedapi/types/percentilesaggregation.go index 083613db91..586cfe7e47 100644 --- a/typedapi/types/percentilesaggregation.go +++ b/typedapi/types/percentilesaggregation.go @@ -16,7 +16,7 @@ // under the License. 
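With PercentileRanksAggregation.Values now a *[]Float64, the whole list is optional rather than defaulting to an empty array. A sketch of supplying it after this change; the field and values are placeholders:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	field := "load_time"
	values := []types.Float64{95, 99, 99.9}
	// A nil Values pointer omits the parameter entirely; a pointer to a
	// populated slice sends it explicitly.
	agg := types.PercentileRanksAggregation{
		Field:  &field,
		Values: &values,
	}
	out, _ := json.Marshal(agg)
	fmt.Println(string(out)) // e.g. {"field":"load_time","values":[95,99,99.9]}
}
```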
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PercentilesAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L195-L214 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L195-L214 type PercentilesAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -92,7 +92,7 @@ func (s *PercentilesAggregation) UnmarshalJSON(data []byte) error { } case "keyed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/percentilesbucketaggregate.go b/typedapi/types/percentilesbucketaggregate.go index 379b4a8ea4..d81da2f687 100644 --- a/typedapi/types/percentilesbucketaggregate.go +++ b/typedapi/types/percentilesbucketaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // PercentilesBucketAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L178-L179 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L178-L179 type PercentilesBucketAggregate struct { Meta Metadata `json:"meta,omitempty"` Values Percentiles `json:"values"` diff --git a/typedapi/types/percentilesbucketaggregation.go b/typedapi/types/percentilesbucketaggregation.go index 2f2fa4bcc1..f7fa8e5466 100644 --- a/typedapi/types/percentilesbucketaggregation.go +++ b/typedapi/types/percentilesbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // PercentilesBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L354-L359 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L354-L359 type PercentilesBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type PercentilesBucketAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. 
GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // Percents The list of percentiles to calculate. Percents []Float64 `json:"percents,omitempty"` } @@ -86,23 +84,6 @@ func (s *PercentilesBucketAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "percents": if err := dec.Decode(&s.Percents); err != nil { return fmt.Errorf("%s | %w", "Percents", err) diff --git a/typedapi/types/percolatequery.go b/typedapi/types/percolatequery.go index b1ca77a7d7..08b3590af7 100644 --- a/typedapi/types/percolatequery.go +++ b/typedapi/types/percolatequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PercolateQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L193-L230 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L202-L239 type PercolateQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -78,7 +78,7 @@ func (s *PercolateQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/percolatorproperty.go b/typedapi/types/percolatorproperty.go index 3e60cbd3e6..4cd152af6c 100644 --- a/typedapi/types/percolatorproperty.go +++ b/typedapi/types/percolatorproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // PercolatorProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L180-L182 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L188-L190 type PercolatorProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -71,7 +71,7 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -92,7 +92,7 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -182,12 +182,6 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -206,6 +200,18 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -362,6 +368,12 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -373,7 +385,7 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -402,7 +414,7 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -423,7 +435,7 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -513,12 +525,6 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -537,6 +543,18 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := 
NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -693,6 +711,12 @@ func (s *PercolatorProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { diff --git a/typedapi/types/perpartitioncategorization.go b/typedapi/types/perpartitioncategorization.go index 59f94ee397..90532ccc18 100644 --- a/typedapi/types/perpartitioncategorization.go +++ b/typedapi/types/perpartitioncategorization.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PerPartitionCategorization type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Analysis.ts#L150-L159 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Analysis.ts#L150-L159 type PerPartitionCategorization struct { // Enabled To enable this setting, you must also set the `partition_field_name` property // to the same value in every detector that uses the keyword `mlcategory`. @@ -62,7 +62,7 @@ func (s *PerPartitionCategorization) UnmarshalJSON(data []byte) error { switch t { case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *PerPartitionCategorization) UnmarshalJSON(data []byte) error { } case "stop_on_warn": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/persistenttaskstatus.go b/typedapi/types/persistenttaskstatus.go index 8f0a479e60..062c6e39eb 100644 --- a/typedapi/types/persistenttaskstatus.go +++ b/typedapi/types/persistenttaskstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // PersistentTaskStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L56-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L56-L58 type PersistentTaskStatus struct { Status shutdownstatus.ShutdownStatus `json:"status"` } diff --git a/typedapi/types/phase.go b/typedapi/types/phase.go index 723adc825f..01d440eff9 100644 --- a/typedapi/types/phase.go +++ b/typedapi/types/phase.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Phase type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/_types/Phase.ts#L25-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/_types/Phase.ts#L25-L36 type Phase struct { Actions json.RawMessage `json:"actions,omitempty"` Configurations *Configurations `json:"configurations,omitempty"` diff --git a/typedapi/types/phases.go b/typedapi/types/phases.go index 1f9b4bf51b..a9ec354781 100644 --- a/typedapi/types/phases.go +++ b/typedapi/types/phases.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Phases type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/_types/Phase.ts#L38-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/_types/Phase.ts#L38-L44 type Phases struct { Cold *Phase `json:"cold,omitempty"` Delete *Phase `json:"delete,omitempty"` diff --git a/typedapi/types/phonetictokenfilter.go b/typedapi/types/phonetictokenfilter.go index fa1ed5d9d0..363d8660d4 100644 --- a/typedapi/types/phonetictokenfilter.go +++ b/typedapi/types/phonetictokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -36,7 +36,7 @@ import ( // PhoneticTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/phonetic-plugin.ts#L64-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/phonetic-plugin.ts#L64-L72 type PhoneticTokenFilter struct { Encoder phoneticencoder.PhoneticEncoder `json:"encoder"` Languageset []phoneticlanguage.PhoneticLanguage `json:"languageset"` @@ -69,13 +69,24 @@ func (s *PhoneticTokenFilter) UnmarshalJSON(data []byte) error { } case "languageset": - if err := dec.Decode(&s.Languageset); err != nil { - return fmt.Errorf("%s | %w", "Languageset", err) + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := &phoneticlanguage.PhoneticLanguage{} + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Languageset", err) + } + + s.Languageset = append(s.Languageset, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Languageset); err != nil { + return fmt.Errorf("%s | %w", "Languageset", err) + } } case "max_code_len": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +106,7 @@ func (s *PhoneticTokenFilter) UnmarshalJSON(data []byte) error { } case "replace": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/phrasesuggest.go b/typedapi/types/phrasesuggest.go index 96e6c0a82f..fd276fd77c 100644 --- a/typedapi/types/phrasesuggest.go +++ b/typedapi/types/phrasesuggest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PhraseSuggest type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L57-L62 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L57-L62 type PhraseSuggest struct { Length int `json:"length"` Offset int `json:"offset"` @@ -56,7 +56,7 @@ func (s *PhraseSuggest) UnmarshalJSON(data []byte) error { case "length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *PhraseSuggest) UnmarshalJSON(data []byte) error { case "offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/phrasesuggestcollate.go b/typedapi/types/phrasesuggestcollate.go index 26e8ffa025..f4503cd430 100644 --- a/typedapi/types/phrasesuggestcollate.go +++ b/typedapi/types/phrasesuggestcollate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PhraseSuggestCollate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L330-L343 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L333-L346 type PhraseSuggestCollate struct { // Params Parameters to use if the query is templated. Params map[string]json.RawMessage `json:"params,omitempty"` @@ -66,7 +66,7 @@ func (s *PhraseSuggestCollate) UnmarshalJSON(data []byte) error { } case "prune": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/phrasesuggestcollatequery.go b/typedapi/types/phrasesuggestcollatequery.go index c8688c1554..844c8a8ac0 100644 --- a/typedapi/types/phrasesuggestcollatequery.go +++ b/typedapi/types/phrasesuggestcollatequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PhraseSuggestCollateQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L345-L354 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L348-L357 type PhraseSuggestCollateQuery struct { // Id The search template ID. Id *string `json:"id,omitempty"` diff --git a/typedapi/types/phrasesuggester.go b/typedapi/types/phrasesuggester.go index b96caa2117..9a5b8ae734 100644 --- a/typedapi/types/phrasesuggester.go +++ b/typedapi/types/phrasesuggester.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PhraseSuggester type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L356-L414 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L359-L417 type PhraseSuggester struct { // Analyzer The analyzer to analyze the suggest text with. // Defaults to the search analyzer of the suggest field. 
@@ -119,7 +119,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { } case "confidence": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -145,7 +145,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { } case "force_unigrams": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -160,7 +160,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case "gram_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -180,7 +180,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { } case "max_errors": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -196,7 +196,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { } case "real_word_error_likelihood": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -225,7 +225,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -241,7 +241,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -274,7 +274,7 @@ func (s *PhraseSuggester) UnmarshalJSON(data []byte) error { case "token_limit": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/phrasesuggesthighlight.go b/typedapi/types/phrasesuggesthighlight.go index 14cc652cbb..a186c3f090 100644 --- a/typedapi/types/phrasesuggesthighlight.go +++ b/typedapi/types/phrasesuggesthighlight.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PhraseSuggestHighlight type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L416-L425 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L419-L428 type PhraseSuggestHighlight struct { // PostTag Use in conjunction with `pre_tag` to define the HTML tags to use for the // highlighted text. diff --git a/typedapi/types/phrasesuggestoption.go b/typedapi/types/phrasesuggestoption.go index f8fc0c04ab..46bb17561e 100644 --- a/typedapi/types/phrasesuggestoption.go +++ b/typedapi/types/phrasesuggestoption.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PhraseSuggestOption type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L86-L91 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L86-L91 type PhraseSuggestOption struct { CollateMatch *bool `json:"collate_match,omitempty"` Highlighted *string `json:"highlighted,omitempty"` @@ -55,7 +55,7 @@ func (s *PhraseSuggestOption) UnmarshalJSON(data []byte) error { switch t { case "collate_match": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -81,7 +81,7 @@ func (s *PhraseSuggestOption) UnmarshalJSON(data []byte) error { s.Highlighted = &o case "score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/pinneddoc.go b/typedapi/types/pinneddoc.go index 9e6b589209..fba18e5c8b 100644 --- a/typedapi/types/pinneddoc.go +++ b/typedapi/types/pinneddoc.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // PinnedDoc type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L253-L262 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L262-L271 type PinnedDoc struct { // Id_ The unique document ID. Id_ string `json:"_id"` diff --git a/typedapi/types/pinnedquery.go b/typedapi/types/pinnedquery.go index 9856c595f1..9a2b4f22cd 100644 --- a/typedapi/types/pinnedquery.go +++ b/typedapi/types/pinnedquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PinnedQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L232-L251 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L241-L260 type PinnedQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -67,7 +67,7 @@ func (s *PinnedQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/pipelineconfig.go b/typedapi/types/pipelineconfig.go index 8817c5cd3e..6ad147ac3f 100644 --- a/typedapi/types/pipelineconfig.go +++ b/typedapi/types/pipelineconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PipelineConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Pipeline.ts#L61-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Pipeline.ts#L61-L75 type PipelineConfig struct { // Description Description of the ingest pipeline. Description *string `json:"description,omitempty"` diff --git a/typedapi/types/pipelinemetadata.go b/typedapi/types/pipelinemetadata.go index f218dc78e8..8a0574a1f1 100644 --- a/typedapi/types/pipelinemetadata.go +++ b/typedapi/types/pipelinemetadata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PipelineMetadata type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/logstash/_types/Pipeline.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/logstash/_types/Pipeline.ts#L23-L26 type PipelineMetadata struct { Type string `json:"type"` Version string `json:"version"` diff --git a/typedapi/types/pipelineprocessor.go b/typedapi/types/pipelineprocessor.go index 1a86ebfbaa..826480fc65 100644 --- a/typedapi/types/pipelineprocessor.go +++ b/typedapi/types/pipelineprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PipelineProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L928-L939 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L928-L939 type PipelineProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -92,7 +92,7 @@ func (s *PipelineProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,7 +106,7 @@ func (s *PipelineProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing_pipeline": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/pipelinesettings.go b/typedapi/types/pipelinesettings.go index 655f1e01de..fd4816e8ad 100644 --- a/typedapi/types/pipelinesettings.go +++ b/typedapi/types/pipelinesettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PipelineSettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/logstash/_types/Pipeline.ts#L28-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/logstash/_types/Pipeline.ts#L28-L59 type PipelineSettings struct { // PipelineBatchDelay When creating pipeline event batches, how long in milliseconds to wait for // each event before dispatching an undersized batch to pipeline workers. @@ -71,7 +71,7 @@ func (s *PipelineSettings) UnmarshalJSON(data []byte) error { case "pipeline.batch.delay": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *PipelineSettings) UnmarshalJSON(data []byte) error { case "pipeline.batch.size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,7 +103,7 @@ func (s *PipelineSettings) UnmarshalJSON(data []byte) error { case "pipeline.workers": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -119,7 +119,7 @@ func (s *PipelineSettings) UnmarshalJSON(data []byte) error { case "queue.checkpoint.writes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -135,7 +135,7 @@ func (s *PipelineSettings) UnmarshalJSON(data []byte) error { case "queue.max_bytes.number": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/pipelinesimulation.go b/typedapi/types/pipelinesimulation.go index d0925fe5f9..f954cec21d 100644 --- a/typedapi/types/pipelinesimulation.go +++ b/typedapi/types/pipelinesimulation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // PipelineSimulation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/simulate/types.ts#L33-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/simulate/types.ts#L33-L39 type PipelineSimulation struct { Doc *DocumentSimulation `json:"doc,omitempty"` ProcessorResults []PipelineSimulation `json:"processor_results,omitempty"` diff --git a/typedapi/types/pipeseparatedflagssimplequerystringflag.go b/typedapi/types/pipeseparatedflagssimplequerystringflag.go index 15c9f073ab..e1999139db 100644 --- a/typedapi/types/pipeseparatedflagssimplequerystringflag.go +++ b/typedapi/types/pipeseparatedflagssimplequerystringflag.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // simplequerystringflag.SimpleQueryStringFlag // string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_spec_utils/PipeSeparatedFlags.ts#L20-L27 -type PipeSeparatedFlagsSimpleQueryStringFlag interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_spec_utils/PipeSeparatedFlags.ts#L20-L27 +type PipeSeparatedFlagsSimpleQueryStringFlag any diff --git a/typedapi/types/pivot.go b/typedapi/types/pivot.go index 8fad70721c..e2cf991b8b 100644 --- a/typedapi/types/pivot.go +++ b/typedapi/types/pivot.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Pivot type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/_types/Transform.ts#L54-L68 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/_types/Transform.ts#L54-L68 type Pivot struct { // Aggregations Defines how to aggregate the grouped data. The following aggregations are // currently supported: average, bucket diff --git a/typedapi/types/pivotgroupbycontainer.go b/typedapi/types/pivotgroupbycontainer.go index c370835269..e4bfc94e3d 100644 --- a/typedapi/types/pivotgroupbycontainer.go +++ b/typedapi/types/pivotgroupbycontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // PivotGroupByContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/_types/Transform.ts#L70-L78 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/_types/Transform.ts#L70-L78 type PivotGroupByContainer struct { DateHistogram *DateHistogramAggregation `json:"date_histogram,omitempty"` GeotileGrid *GeoTileGridAggregation `json:"geotile_grid,omitempty"` diff --git a/typedapi/types/pluginsrecord.go b/typedapi/types/pluginsrecord.go index 06cea9b1ca..522d90f27e 100644 --- a/typedapi/types/pluginsrecord.go +++ b/typedapi/types/pluginsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PluginsRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/plugins/types.ts#L22-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/plugins/types.ts#L22-L52 type PluginsRecord struct { // Component The component name. Component *string `json:"component,omitempty"` diff --git a/typedapi/types/pluginsstatus.go b/typedapi/types/pluginsstatus.go index 5e60750277..057abe879c 100644 --- a/typedapi/types/pluginsstatus.go +++ b/typedapi/types/pluginsstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // PluginsStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L60-L62 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L60-L62 type PluginsStatus struct { Status shutdownstatus.ShutdownStatus `json:"status"` } diff --git a/typedapi/types/pluginstats.go b/typedapi/types/pluginstats.go index 966a239c43..bcc77c277f 100644 --- a/typedapi/types/pluginstats.go +++ b/typedapi/types/pluginstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PluginStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L180-L190 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L180-L190 type PluginStats struct { Classname string `json:"classname"` Description string `json:"description"` @@ -94,7 +94,7 @@ func (s *PluginStats) UnmarshalJSON(data []byte) error { } case "has_native_controller": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *PluginStats) UnmarshalJSON(data []byte) error { } case "licensed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/pointintimereference.go b/typedapi/types/pointintimereference.go index c90ec86046..a8a2ad3ef6 100644 --- a/typedapi/types/pointintimereference.go +++ b/typedapi/types/pointintimereference.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // PointInTimeReference type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/PointInTimeReference.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/PointInTimeReference.ts#L23-L26 type PointInTimeReference struct { Id string `json:"id"` KeepAlive Duration `json:"keep_alive,omitempty"` diff --git a/typedapi/types/pointproperty.go b/typedapi/types/pointproperty.go index 6deee50b6c..6e28518532 100644 --- a/typedapi/types/pointproperty.go +++ b/typedapi/types/pointproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // PointProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/geo.ts#L66-L71 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/geo.ts#L66-L71 type PointProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -83,7 +83,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +108,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -129,7 +129,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -219,12 +219,6 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -243,6 +237,18 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -399,6 +405,12 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -410,7 +422,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v 
:= tmp.(type) { case string: @@ -425,7 +437,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -439,7 +451,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { } case "ignore_z_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -479,7 +491,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -500,7 +512,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -590,12 +602,6 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -614,6 +620,18 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -770,6 +788,12 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -792,7 +816,7 @@ func (s *PointProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/pool.go b/typedapi/types/pool.go index a62615bfa5..cdc6d5d328 100644 --- a/typedapi/types/pool.go +++ b/typedapi/types/pool.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Pool type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L878-L895 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L878-L895 type Pool struct { // MaxInBytes Maximum amount of memory, in bytes, available for use by the heap. 
MaxInBytes *int64 `json:"max_in_bytes,omitempty"` @@ -59,7 +59,7 @@ func (s *Pool) UnmarshalJSON(data []byte) error { switch t { case "max_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -74,7 +74,7 @@ func (s *Pool) UnmarshalJSON(data []byte) error { } case "peak_max_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *Pool) UnmarshalJSON(data []byte) error { } case "peak_used_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *Pool) UnmarshalJSON(data []byte) error { } case "used_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/porterstemtokenfilter.go b/typedapi/types/porterstemtokenfilter.go index d48b2bc8d8..4399f783ff 100644 --- a/typedapi/types/porterstemtokenfilter.go +++ b/typedapi/types/porterstemtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // PorterStemTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L292-L294 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L294-L296 type PorterStemTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` diff --git a/typedapi/types/postmigrationfeature.go b/typedapi/types/postmigrationfeature.go index e3bd30add7..de94d7a6ea 100644 --- a/typedapi/types/postmigrationfeature.go +++ b/typedapi/types/postmigrationfeature.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PostMigrationFeature type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/migration/post_feature_upgrade/PostFeatureUpgradeResponse.ts#L27-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/migration/post_feature_upgrade/PostFeatureUpgradeResponse.ts#L27-L29 type PostMigrationFeature struct { FeatureName string `json:"feature_name"` } diff --git a/typedapi/types/predicatetokenfilter.go b/typedapi/types/predicatetokenfilter.go index 198a2d3d0d..711313e61c 100644 --- a/typedapi/types/predicatetokenfilter.go +++ b/typedapi/types/predicatetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // PredicateTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L296-L299 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L298-L301 type PredicateTokenFilter struct { Script Script `json:"script"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/predictedvalue.go b/typedapi/types/predictedvalue.go index 5ff26df81e..8b4d072ad2 100644 --- a/typedapi/types/predictedvalue.go +++ b/typedapi/types/predictedvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -27,5 +27,5 @@ package types // bool // int // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L457-L457 -type PredictedValue interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L457-L457 +type PredictedValue any diff --git a/typedapi/types/prefixquery.go b/typedapi/types/prefixquery.go index 16574bd52a..942bfdb017 100644 --- a/typedapi/types/prefixquery.go +++ b/typedapi/types/prefixquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PrefixQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L87-L106 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L88-L107 type PrefixQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -76,7 +76,7 @@ func (s *PrefixQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *PrefixQuery) UnmarshalJSON(data []byte) error { } case "case_insensitive": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/preprocessor.go b/typedapi/types/preprocessor.go index b1f6ca5857..0a986a0a3c 100644 --- a/typedapi/types/preprocessor.go +++ b/typedapi/types/preprocessor.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Preprocessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/types.ts#L31-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/types.ts#L31-L36 type Preprocessor struct { FrequencyEncoding *FrequencyEncodingPreprocessor `json:"frequency_encoding,omitempty"` OneHotEncoding *OneHotEncodingPreprocessor `json:"one_hot_encoding,omitempty"` diff --git a/typedapi/types/pressurememory.go b/typedapi/types/pressurememory.go index 5d89ad3219..589b269bc3 100644 --- a/typedapi/types/pressurememory.go +++ b/typedapi/types/pressurememory.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PressureMemory type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L144-L199 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L144-L199 type PressureMemory struct { // All Memory consumed by indexing requests in the coordinating, primary, or replica // stage. @@ -89,7 +89,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { } case "all_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -109,7 +109,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { } case "combined_coordinating_and_primary_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -129,7 +129,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { } case "coordinating_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -144,7 +144,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { } case "coordinating_rejections": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -164,7 +164,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { } case "primary_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -179,7 +179,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { } case "primary_rejections": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -199,7 +199,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { } case "replica_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -214,7 +214,7 @@ func (s *PressureMemory) UnmarshalJSON(data []byte) error { } case "replica_rejections": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/privileges.go b/typedapi/types/privileges.go index 3d81780967..3a95c7c636 100644 --- a/typedapi/types/privileges.go +++ b/typedapi/types/privileges.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Privileges type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/has_privileges/types.ts#L48-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/has_privileges/types.ts#L48-L48 type Privileges map[string]bool diff --git a/typedapi/types/privilegesactions.go b/typedapi/types/privilegesactions.go index 2861e932e3..dbef42da62 100644 --- a/typedapi/types/privilegesactions.go +++ b/typedapi/types/privilegesactions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PrivilegesActions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/put_privileges/types.ts#L22-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/put_privileges/types.ts#L22-L27 type PrivilegesActions struct { Actions []string `json:"actions"` Application *string `json:"application,omitempty"` diff --git a/typedapi/types/privilegescheck.go b/typedapi/types/privilegescheck.go index 6503cec029..e0fb2460cf 100644 --- a/typedapi/types/privilegescheck.go +++ b/typedapi/types/privilegescheck.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // PrivilegesCheck type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/has_privileges_user_profile/types.ts#L30-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/has_privileges_user_profile/types.ts#L30-L37 type PrivilegesCheck struct { Application []ApplicationPrivilegesCheck `json:"application,omitempty"` // Cluster A list of the cluster privileges that you want to check. diff --git a/typedapi/types/process.go b/typedapi/types/process.go index d619eecb6f..b77bcfae44 100644 --- a/typedapi/types/process.go +++ b/typedapi/types/process.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Process type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L953-L975 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L953-L975 type Process struct { // Cpu Contains CPU statistics for the node. Cpu *Cpu `json:"cpu,omitempty"` @@ -70,7 +70,7 @@ func (s *Process) UnmarshalJSON(data []byte) error { case "max_file_descriptors": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *Process) UnmarshalJSON(data []byte) error { case "open_file_descriptors": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,7 +106,7 @@ func (s *Process) UnmarshalJSON(data []byte) error { } case "timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/processor.go b/typedapi/types/processor.go index 6431e8abbb..73b7eca2b4 100644 --- a/typedapi/types/processor.go +++ b/typedapi/types/processor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Processor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L384-L401 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L384-L401 type Processor struct { // Count Number of documents transformed by the processor. Count *int64 `json:"count,omitempty"` @@ -59,7 +59,7 @@ func (s *Processor) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -74,7 +74,7 @@ func (s *Processor) UnmarshalJSON(data []byte) error { } case "current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *Processor) UnmarshalJSON(data []byte) error { } case "failed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/processorcontainer.go b/typedapi/types/processorcontainer.go index 2b967858e1..68a8d8ca0f 100644 --- a/typedapi/types/processorcontainer.go +++ b/typedapi/types/processorcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ProcessorContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L27-L239 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L27-L239 type ProcessorContainer struct { // Append Appends one or more values to an existing array if the field already exists // and it is an array. 
diff --git a/typedapi/types/profile.go b/typedapi/types/profile.go index eeb03f28bb..0c6487994a 100644 --- a/typedapi/types/profile.go +++ b/typedapi/types/profile.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Profile type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L93-L95 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L93-L95 type Profile struct { Shards []ShardProfile `json:"shards"` } diff --git a/typedapi/types/property.go b/typedapi/types/property.go index f9c15a2aff..e3233ea447 100644 --- a/typedapi/types/property.go +++ b/typedapi/types/property.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -39,10 +39,11 @@ package types // DateProperty // AggregateMetricDoubleProperty // DenseVectorProperty -// SparseVectorProperty // FlattenedProperty // NestedProperty // ObjectProperty +// SemanticTextProperty +// SparseVectorProperty // CompletionProperty // ConstantKeywordProperty // FieldAliasProperty @@ -69,6 +70,7 @@ package types // IntegerRangeProperty // IpRangeProperty // LongRangeProperty +// IcuCollationProperty // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/Property.ts#L94-L158 -type Property interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/Property.ts#L96-L164 +type Property any diff --git a/typedapi/types/publishedclusterstates.go b/typedapi/types/publishedclusterstates.go index 1021b262e4..f9907d48a7 100644 --- a/typedapi/types/publishedclusterstates.go +++ b/typedapi/types/publishedclusterstates.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // PublishedClusterStates type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L263-L276 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L263-L276 type PublishedClusterStates struct { // CompatibleDiffs Number of compatible differences between published cluster states. 
CompatibleDiffs *int64 `json:"compatible_diffs,omitempty"` @@ -57,7 +57,7 @@ func (s *PublishedClusterStates) UnmarshalJSON(data []byte) error { switch t { case "compatible_diffs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *PublishedClusterStates) UnmarshalJSON(data []byte) error { } case "full_states": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *PublishedClusterStates) UnmarshalJSON(data []byte) error { } case "incompatible_diffs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/queries.go b/typedapi/types/queries.go index 595d8fcae2..39f33e1a78 100644 --- a/typedapi/types/queries.go +++ b/typedapi/types/queries.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Queries type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L401-L403 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L403-L405 type Queries struct { Cache *CacheQueries `json:"cache,omitempty"` } diff --git a/typedapi/types/query.go b/typedapi/types/query.go index aef8eb2cf0..d4c7e343f8 100644 --- a/typedapi/types/query.go +++ b/typedapi/types/query.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Query type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/abstractions.ts#L100-L407 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/abstractions.ts#L102-L427 type Query struct { // Bool matches documents matching boolean combinations of other queries. Bool *BoolQuery `json:"bool,omitempty"` @@ -57,9 +57,6 @@ type Query struct { DistanceFeature DistanceFeatureQuery `json:"distance_feature,omitempty"` // Exists Returns documents that contain an indexed value for a field. Exists *ExistsQuery `json:"exists,omitempty"` - // FieldMaskingSpan Wrapper to allow span queries to participate in composite single-field span - // queries by _lying_ about their search field. - FieldMaskingSpan *SpanFieldMaskingQuery `json:"field_masking_span,omitempty"` // FunctionScore The `function_score` enables you to modify the score of documents that are // retrieved by a query. FunctionScore *FunctionScoreQuery `json:"function_score,omitempty"` @@ -134,13 +131,15 @@ type Query struct { // `rank_feature` or `rank_features` field. RankFeature *RankFeatureQuery `json:"rank_feature,omitempty"` // Regexp Returns documents that contain terms matching a regular expression. 
- Regexp map[string]RegexpQuery `json:"regexp,omitempty"` - RuleQuery *RuleQuery `json:"rule_query,omitempty"` + Regexp map[string]RegexpQuery `json:"regexp,omitempty"` + Rule *RuleQuery `json:"rule,omitempty"` // Script Filters documents based on a provided script. // The script query is typically used in a filter context. Script *ScriptQuery `json:"script,omitempty"` // ScriptScore Uses a script to provide a custom score for returned documents. ScriptScore *ScriptScoreQuery `json:"script_score,omitempty"` + // Semantic A semantic query to semantic_text field types + Semantic *SemanticQuery `json:"semantic,omitempty"` // Shape Queries documents that contain fields indexed using the `shape` type. Shape *ShapeQuery `json:"shape,omitempty"` // SimpleQueryString Returns documents based on a provided query string, using a parser with a @@ -148,6 +147,9 @@ type Query struct { SimpleQueryString *SimpleQueryStringQuery `json:"simple_query_string,omitempty"` // SpanContaining Returns matches which enclose another span query. SpanContaining *SpanContainingQuery `json:"span_containing,omitempty"` + // SpanFieldMasking Wrapper to allow span queries to participate in composite single-field span + // queries by _lying_ about their search field. + SpanFieldMasking *SpanFieldMaskingQuery `json:"span_field_masking,omitempty"` // SpanFirst Matches spans near the beginning of a field. SpanFirst *SpanFirstQuery `json:"span_first,omitempty"` // SpanMulti Allows you to wrap a multi term query (one of `wildcard`, `fuzzy`, `prefix`, @@ -167,6 +169,10 @@ type Query struct { SpanTerm map[string]SpanTermQuery `json:"span_term,omitempty"` // SpanWithin Returns matches which are enclosed inside another span query. SpanWithin *SpanWithinQuery `json:"span_within,omitempty"` + // SparseVector Using input query vectors or a natural language processing model to convert a + // query into a list of token-weight pairs, queries against a sparse vector + // field. + SparseVector *SparseVectorQuery `json:"sparse_vector,omitempty"` // Term Returns documents that contain an exact term in a provided field. // To return a document, the query term must exactly match the queried field's // value, including whitespace and capitalization. 
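The query.go hunk above renames the `rule_query` clause to `rule` and adds first-class `semantic`, `span_field_masking`, and `sparse_vector` fields to the typed `Query` container. A minimal sketch of the caller-visible effect, assuming only the outer `types.Query` shape shown in this diff (the inner `field`/`query` keys of the semantic clause are illustrative and not taken from this diff):

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// After this change a "semantic" clause is decoded into Query.Semantic by the
	// custom UnmarshalJSON updated below; "rule" takes the place of the old
	// "rule_query" key, and "span_field_masking" replaces "field_masking_span".
	raw := []byte(`{"semantic": {"field": "content", "query": "what changed in 8.15?"}}`)
	var q types.Query
	if err := json.Unmarshal(raw, &q); err != nil {
		panic(err)
	}
	fmt.Println("semantic clause decoded:", q.Semantic != nil)
}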
@@ -243,30 +249,15 @@ func (s *Query) UnmarshalJSON(data []byte) error { } case "distance_feature": - message := json.RawMessage{} - if err := dec.Decode(&message); err != nil { + if err := dec.Decode(&s.DistanceFeature); err != nil { return fmt.Errorf("%s | %w", "DistanceFeature", err) } - o := NewGeoDistanceFeatureQuery() - err := json.Unmarshal(message, &o) - if err != nil { - o := NewDateDistanceFeatureQuery() - err := json.Unmarshal(message, &o) - if err != nil { - return fmt.Errorf("%s | %w", "DistanceFeature", err) - } - } case "exists": if err := dec.Decode(&s.Exists); err != nil { return fmt.Errorf("%s | %w", "Exists", err) } - case "field_masking_span": - if err := dec.Decode(&s.FieldMaskingSpan); err != nil { - return fmt.Errorf("%s | %w", "FieldMaskingSpan", err) - } - case "function_score": if err := dec.Decode(&s.FunctionScore); err != nil { return fmt.Errorf("%s | %w", "FunctionScore", err) @@ -434,9 +425,9 @@ func (s *Query) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Regexp", err) } - case "rule_query": - if err := dec.Decode(&s.RuleQuery); err != nil { - return fmt.Errorf("%s | %w", "RuleQuery", err) + case "rule": + if err := dec.Decode(&s.Rule); err != nil { + return fmt.Errorf("%s | %w", "Rule", err) } case "script": @@ -449,6 +440,11 @@ func (s *Query) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "ScriptScore", err) } + case "semantic": + if err := dec.Decode(&s.Semantic); err != nil { + return fmt.Errorf("%s | %w", "Semantic", err) + } + case "shape": if err := dec.Decode(&s.Shape); err != nil { return fmt.Errorf("%s | %w", "Shape", err) @@ -464,6 +460,11 @@ func (s *Query) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "SpanContaining", err) } + case "span_field_masking": + if err := dec.Decode(&s.SpanFieldMasking); err != nil { + return fmt.Errorf("%s | %w", "SpanFieldMasking", err) + } + case "span_first": if err := dec.Decode(&s.SpanFirst); err != nil { return fmt.Errorf("%s | %w", "SpanFirst", err) @@ -502,6 +503,11 @@ func (s *Query) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "SpanWithin", err) } + case "sparse_vector": + if err := dec.Decode(&s.SparseVector); err != nil { + return fmt.Errorf("%s | %w", "SparseVector", err) + } + case "term": if s.Term == nil { s.Term = make(map[string]TermQuery, 0) diff --git a/typedapi/types/querybreakdown.go b/typedapi/types/querybreakdown.go index 49b843cc44..3378445295 100644 --- a/typedapi/types/querybreakdown.go +++ b/typedapi/types/querybreakdown.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // QueryBreakdown type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L97-L116 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L97-L116 type QueryBreakdown struct { Advance int64 `json:"advance"` AdvanceCount int64 `json:"advance_count"` @@ -69,7 +69,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { switch t { case "advance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,7 +84,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "advance_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,7 +99,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "build_scorer": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "build_scorer_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -129,7 +129,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "compute_max_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -144,7 +144,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "compute_max_score_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -159,7 +159,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "create_weight": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -174,7 +174,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "create_weight_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -189,7 +189,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "match": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -204,7 +204,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "match_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -219,7 +219,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "next_doc": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -234,7 +234,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "next_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -249,7 +249,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -264,7 +264,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "score_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -279,7 +279,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "set_min_competitive_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -294,7 +294,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "set_min_competitive_score_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := 
tmp.(type) { case string: @@ -309,7 +309,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "shallow_advance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -324,7 +324,7 @@ func (s *QueryBreakdown) UnmarshalJSON(data []byte) error { } case "shallow_advance_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/querycachestats.go b/typedapi/types/querycachestats.go index c52fe13e9f..4fc58d8126 100644 --- a/typedapi/types/querycachestats.go +++ b/typedapi/types/querycachestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,20 +31,20 @@ import ( // QueryCacheStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L192-L226 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L192-L226 type QueryCacheStats struct { // CacheCount Total number of entries added to the query cache across all shards assigned // to selected nodes. // This number includes current and evicted entries. - CacheCount int `json:"cache_count"` + CacheCount int64 `json:"cache_count"` // CacheSize Total number of entries currently in the query cache across all shards // assigned to selected nodes. - CacheSize int `json:"cache_size"` + CacheSize int64 `json:"cache_size"` // Evictions Total number of query cache evictions across all shards assigned to selected // nodes. - Evictions int `json:"evictions"` + Evictions int64 `json:"evictions"` // HitCount Total count of query cache hits across all shards assigned to selected nodes. - HitCount int `json:"hit_count"` + HitCount int64 `json:"hit_count"` // MemorySize Total amount of memory used for the query cache across all shards assigned to // selected nodes. MemorySize ByteSize `json:"memory_size,omitempty"` @@ -53,10 +53,10 @@ type QueryCacheStats struct { MemorySizeInBytes int64 `json:"memory_size_in_bytes"` // MissCount Total count of query cache misses across all shards assigned to selected // nodes. - MissCount int `json:"miss_count"` + MissCount int64 `json:"miss_count"` // TotalCount Total count of hits and misses in the query cache across all shards assigned // to selected nodes. 
- TotalCount int `json:"total_count"` + TotalCount int64 `json:"total_count"` } func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { @@ -75,66 +75,62 @@ func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { switch t { case "cache_count": - - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { return fmt.Errorf("%s | %w", "CacheCount", err) } s.CacheCount = value case float64: - f := int(v) + f := int64(v) s.CacheCount = f } case "cache_size": - - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { return fmt.Errorf("%s | %w", "CacheSize", err) } s.CacheSize = value case float64: - f := int(v) + f := int64(v) s.CacheSize = f } case "evictions": - - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { return fmt.Errorf("%s | %w", "Evictions", err) } s.Evictions = value case float64: - f := int(v) + f := int64(v) s.Evictions = f } case "hit_count": - - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { return fmt.Errorf("%s | %w", "HitCount", err) } s.HitCount = value case float64: - f := int(v) + f := int64(v) s.HitCount = f } @@ -144,7 +140,7 @@ func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { } case "memory_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -159,34 +155,32 @@ func (s *QueryCacheStats) UnmarshalJSON(data []byte) error { } case "miss_count": - - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { return fmt.Errorf("%s | %w", "MissCount", err) } s.MissCount = value case float64: - f := int(v) + f := int64(v) s.MissCount = f } case "total_count": - - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: - value, err := strconv.Atoi(v) + value, err := strconv.ParseInt(v, 10, 64) if err != nil { return fmt.Errorf("%s | %w", "TotalCount", err) } s.TotalCount = value case float64: - f := int(v) + f := int64(v) s.TotalCount = f } diff --git a/typedapi/types/queryprofile.go b/typedapi/types/queryprofile.go index de15745fa8..80f8e77146 100644 --- a/typedapi/types/queryprofile.go +++ b/typedapi/types/queryprofile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // QueryProfile type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L118-L124 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L118-L124 type QueryProfile struct { Breakdown QueryBreakdown `json:"breakdown"` Children []QueryProfile `json:"children,omitempty"` diff --git a/typedapi/types/queryrule.go b/typedapi/types/queryrule.go index 0ca5874ce7..ba1cdee4c9 100644 --- a/typedapi/types/queryrule.go +++ b/typedapi/types/queryrule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // QueryRule type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/query_ruleset/_types/QueryRuleset.ts#L37-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/_types/QueryRuleset.ts#L37-L42 type QueryRule struct { Actions QueryRuleActions `json:"actions"` Criteria []QueryRuleCriteria `json:"criteria"` diff --git a/typedapi/types/queryruleactions.go b/typedapi/types/queryruleactions.go index 15696e81b7..893520e870 100644 --- a/typedapi/types/queryruleactions.go +++ b/typedapi/types/queryruleactions.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // QueryRuleActions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/query_ruleset/_types/QueryRuleset.ts#L67-L70 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/_types/QueryRuleset.ts#L68-L71 type QueryRuleActions struct { Docs []PinnedDoc `json:"docs,omitempty"` Ids []string `json:"ids,omitempty"` diff --git a/typedapi/types/queryrulecriteria.go b/typedapi/types/queryrulecriteria.go index d576bd2561..39a9204178 100644 --- a/typedapi/types/queryrulecriteria.go +++ b/typedapi/types/queryrulecriteria.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,9 +33,9 @@ import ( // QueryRuleCriteria type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/query_ruleset/_types/QueryRuleset.ts#L48-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/_types/QueryRuleset.ts#L48-L52 type QueryRuleCriteria struct { - Metadata string `json:"metadata"` + Metadata *string `json:"metadata,omitempty"` Type queryrulecriteriatype.QueryRuleCriteriaType `json:"type"` Values []json.RawMessage `json:"values,omitempty"` } @@ -65,7 +65,7 @@ func (s *QueryRuleCriteria) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Metadata = o + s.Metadata = &o case "type": if err := dec.Decode(&s.Type); err != nil { diff --git a/typedapi/types/queryruleset.go b/typedapi/types/queryruleset.go index 694071b74c..2ba36b109a 100644 --- a/typedapi/types/queryruleset.go +++ b/typedapi/types/queryruleset.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // QueryRuleset type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/query_ruleset/_types/QueryRuleset.ts#L26-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/_types/QueryRuleset.ts#L26-L35 type QueryRuleset struct { // Rules Rules associated with the query ruleset Rules []QueryRule `json:"rules"` diff --git a/typedapi/types/queryrulesetlistitem.go b/typedapi/types/queryrulesetlistitem.go index aabebfaa35..b27a8f60b5 100644 --- a/typedapi/types/queryrulesetlistitem.go +++ b/typedapi/types/queryrulesetlistitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,10 +31,12 @@ import ( // QueryRulesetListItem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/query_ruleset/list/types.ts#L22-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/query_rules/list_rulesets/types.ts#L23-L37 type QueryRulesetListItem struct { - // RulesCount The number of rules associated with this ruleset - RulesCount int `json:"rules_count"` + // RuleCriteriaTypesCounts A map of criteria type to the number of rules of that type + RuleCriteriaTypesCounts map[string]string `json:"rule_criteria_types_counts"` + // RuleTotalCount The number of rules associated with this ruleset + RuleTotalCount int `json:"rule_total_count"` // RulesetId Ruleset unique identifier RulesetId string `json:"ruleset_id"` } @@ -54,20 +56,28 @@ func (s *QueryRulesetListItem) UnmarshalJSON(data []byte) error { switch t { - case "rules_count": + case "rule_criteria_types_counts": + if s.RuleCriteriaTypesCounts == nil { + s.RuleCriteriaTypesCounts = make(map[string]string, 0) + } + if err := dec.Decode(&s.RuleCriteriaTypesCounts); err != nil { + return fmt.Errorf("%s | %w", "RuleCriteriaTypesCounts", err) + } - var tmp interface{} + case "rule_total_count": + + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: value, err := strconv.Atoi(v) if err != nil { - return fmt.Errorf("%s | %w", "RulesCount", err) + return fmt.Errorf("%s | %w", "RuleTotalCount", err) } - s.RulesCount = value + s.RuleTotalCount = value case float64: f := int(v) - s.RulesCount = f + s.RuleTotalCount = f } case "ruleset_id": @@ -82,7 +92,9 @@ func (s *QueryRulesetListItem) UnmarshalJSON(data []byte) error { // NewQueryRulesetListItem returns a QueryRulesetListItem. func NewQueryRulesetListItem() *QueryRulesetListItem { - r := &QueryRulesetListItem{} + r := &QueryRulesetListItem{ + RuleCriteriaTypesCounts: make(map[string]string, 0), + } return r } diff --git a/typedapi/types/querystringquery.go b/typedapi/types/querystringquery.go index 61674c3bdd..d5e25aea38 100644 --- a/typedapi/types/querystringquery.go +++ b/typedapi/types/querystringquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // QueryStringQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L580-L700 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L580-L700 type QueryStringQuery struct { // AllowLeadingWildcard If `true`, the wildcard characters `*` and `?` are allowed as the first // character of the query string. 
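The query_rules list item changes above replace `rules_count` with `rule_total_count` and add a `rule_criteria_types_counts` map (typed `map[string]string` in this generation). A hedged sketch of decoding the new shape, using only the fields shown in this diff; the criteria type names and counts in the sample payload are illustrative:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{
		"ruleset_id": "my-ruleset",
		"rule_total_count": 2,
		"rule_criteria_types_counts": {"exact": "1", "fuzzy": "1"}
	}`)
	var item types.QueryRulesetListItem
	if err := json.Unmarshal(raw, &item); err != nil {
		panic(err)
	}
	// RuleTotalCount replaces the old RulesCount field; the per-criteria-type map is new
	// and is initialized by the decoder when absent, per the generated UnmarshalJSON above.
	fmt.Println(item.RulesetId, item.RuleTotalCount, item.RuleCriteriaTypesCounts["exact"])
}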
@@ -124,7 +124,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { switch t { case "allow_leading_wildcard": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { } case "analyze_wildcard": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -164,7 +164,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { s.Analyzer = &o case "auto_generate_synonyms_phrase_query": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -178,7 +178,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { } case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -204,7 +204,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { } case "enable_position_increments": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -218,7 +218,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { } case "escape": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -243,7 +243,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "fuzzy_max_expansions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -259,7 +259,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "fuzzy_prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -279,7 +279,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { } case "fuzzy_transpositions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -293,7 +293,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { } case "lenient": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -308,7 +308,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { case "max_determinized_states": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -328,7 +328,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { } case "phrase_slop": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -397,7 +397,7 @@ func (s *QueryStringQuery) UnmarshalJSON(data []byte) error { } case "tie_breaker": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/queryvectorbuilder.go b/typedapi/types/queryvectorbuilder.go index fb7085d27c..713b6233fe 100644 --- a/typedapi/types/queryvectorbuilder.go +++ b/typedapi/types/queryvectorbuilder.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // QueryVectorBuilder type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Knn.ts#L51-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Knn.ts#L69-L72 type QueryVectorBuilder struct { TextEmbedding *TextEmbedding `json:"text_embedding,omitempty"` } diff --git a/typedapi/types/querywatch.go b/typedapi/types/querywatch.go index 28bdcf9e38..26150d0942 100644 --- a/typedapi/types/querywatch.go +++ b/typedapi/types/querywatch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // QueryWatch type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Watch.ts#L58-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Watch.ts#L58-L64 type QueryWatch struct { Id_ string `json:"_id"` PrimaryTerm_ *int `json:"_primary_term,omitempty"` @@ -62,7 +62,7 @@ func (s *QueryWatch) UnmarshalJSON(data []byte) error { case "_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/questionansweringinferenceoptions.go b/typedapi/types/questionansweringinferenceoptions.go index 8efa52d4fc..2e491ba437 100644 --- a/typedapi/types/questionansweringinferenceoptions.go +++ b/typedapi/types/questionansweringinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // QuestionAnsweringInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L282-L292 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L282-L292 type QuestionAnsweringInferenceOptions struct { // MaxAnswerLength The maximum answer length to consider MaxAnswerLength *int `json:"max_answer_length,omitempty"` @@ -61,7 +61,7 @@ func (s *QuestionAnsweringInferenceOptions) UnmarshalJSON(data []byte) error { case "max_answer_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -77,7 +77,7 @@ func (s *QuestionAnsweringInferenceOptions) UnmarshalJSON(data []byte) error { case "num_top_classes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/questionansweringinferenceupdateoptions.go b/typedapi/types/questionansweringinferenceupdateoptions.go index 68c27d644c..a913cd69be 100644 --- a/typedapi/types/questionansweringinferenceupdateoptions.go +++ b/typedapi/types/questionansweringinferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // QuestionAnsweringInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L420-L431 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L420-L431 type QuestionAnsweringInferenceUpdateOptions struct { // MaxAnswerLength The maximum answer length to consider for extraction MaxAnswerLength *int `json:"max_answer_length,omitempty"` @@ -63,7 +63,7 @@ func (s *QuestionAnsweringInferenceUpdateOptions) UnmarshalJSON(data []byte) err case "max_answer_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -79,7 +79,7 @@ func (s *QuestionAnsweringInferenceUpdateOptions) UnmarshalJSON(data []byte) err case "num_top_classes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/randomscorefunction.go b/typedapi/types/randomscorefunction.go index d589e8ebe5..48ee5a2445 100644 --- a/typedapi/types/randomscorefunction.go +++ b/typedapi/types/randomscorefunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RandomScoreFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L127-L130 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L128-L131 type RandomScoreFunction struct { Field *string `json:"field,omitempty"` Seed string `json:"seed,omitempty"` diff --git a/typedapi/types/rangeaggregate.go b/typedapi/types/rangeaggregate.go index 888a2fa207..db7f39e58b 100644 --- a/typedapi/types/rangeaggregate.go +++ b/typedapi/types/rangeaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RangeAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L531-L532 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L535-L536 type RangeAggregate struct { Buckets BucketsRangeBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/rangeaggregation.go b/typedapi/types/rangeaggregation.go index f9b16f1d46..14ffd72467 100644 --- a/typedapi/types/rangeaggregation.go +++ b/typedapi/types/rangeaggregation.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,19 +31,17 @@ import ( // RangeAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L650-L670 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L652-L672 type RangeAggregation struct { // Field The date field whose values are use to build ranges. Field *string `json:"field,omitempty"` Format *string `json:"format,omitempty"` // Keyed Set to `true` to associate a unique string key with each bucket and return // the ranges as a hash rather than an array. - Keyed *bool `json:"keyed,omitempty"` - Meta Metadata `json:"meta,omitempty"` + Keyed *bool `json:"keyed,omitempty"` // Missing The value to apply to documents that do not have a value. // By default, documents without a value are ignored. - Missing *int `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` + Missing *int `json:"missing,omitempty"` // Ranges An array of ranges used to bucket documents. Ranges []AggregationRange `json:"ranges,omitempty"` Script Script `json:"script,omitempty"` @@ -82,7 +80,7 @@ func (s *RangeAggregation) UnmarshalJSON(data []byte) error { s.Format = &o case "keyed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,14 +93,9 @@ func (s *RangeAggregation) UnmarshalJSON(data []byte) error { s.Keyed = &v } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -116,18 +109,6 @@ func (s *RangeAggregation) UnmarshalJSON(data []byte) error { s.Missing = &f } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "ranges": if err := dec.Decode(&s.Ranges); err != nil { return fmt.Errorf("%s | %w", "Ranges", err) diff --git a/typedapi/types/rangebucket.go b/typedapi/types/rangebucket.go index 1db673df41..7646d001cb 100644 --- a/typedapi/types/rangebucket.go +++ b/typedapi/types/rangebucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // RangeBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L534-L541 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L538-L545 type RangeBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -60,7 +60,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { } case "from": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { s.Key = &o case "to": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -587,7 +587,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -637,7 +637,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -647,7 +647,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -664,7 +664,7 @@ func (s *RangeBucket) UnmarshalJSON(data []byte) error { func (s RangeBucket) MarshalJSON() ([]byte, error) { type opt RangeBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/rangequery.go b/typedapi/types/rangequery.go index ed7dd9fe99..38be81d60a 100644 --- a/typedapi/types/rangequery.go +++ b/typedapi/types/rangequery.go @@ -16,14 +16,16 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // RangeQuery holds the union for the following types: // +// UntypedRangeQuery // DateRangeQuery // NumberRangeQuery +// TermRangeQuery // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L166-L168 -type RangeQuery interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L161-L170 +type RangeQuery any diff --git a/typedapi/types/rangequerybasedatemath.go b/typedapi/types/rangequerybasedatemath.go new file mode 100644 index 0000000000..8852cf8e24 --- /dev/null +++ b/typedapi/types/rangequerybasedatemath.go @@ -0,0 +1,147 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. 
See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/rangerelation" +) + +// RangeQueryBaseDateMath type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L109-L133 +type RangeQueryBaseDateMath struct { + // Boost Floating point number used to decrease or increase the relevance scores of + // the query. + // Boost values are relative to the default value of 1.0. + // A boost value between 0 and 1.0 decreases the relevance score. + // A value greater than 1.0 increases the relevance score. + Boost *float32 `json:"boost,omitempty"` + From *string `json:"from,omitempty"` + // Gt Greater than. + Gt *string `json:"gt,omitempty"` + // Gte Greater than or equal to. + Gte *string `json:"gte,omitempty"` + // Lt Less than. + Lt *string `json:"lt,omitempty"` + // Lte Less than or equal to. + Lte *string `json:"lte,omitempty"` + QueryName_ *string `json:"_name,omitempty"` + // Relation Indicates how the range query matches values for `range` fields. 
+ Relation *rangerelation.RangeRelation `json:"relation,omitempty"` + To *string `json:"to,omitempty"` +} + +func (s *RangeQueryBaseDateMath) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "Boost", err) + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "from": + if err := dec.Decode(&s.From); err != nil { + return fmt.Errorf("%s | %w", "From", err) + } + + case "gt": + if err := dec.Decode(&s.Gt); err != nil { + return fmt.Errorf("%s | %w", "Gt", err) + } + + case "gte": + if err := dec.Decode(&s.Gte); err != nil { + return fmt.Errorf("%s | %w", "Gte", err) + } + + case "lt": + if err := dec.Decode(&s.Lt); err != nil { + return fmt.Errorf("%s | %w", "Lt", err) + } + + case "lte": + if err := dec.Decode(&s.Lte); err != nil { + return fmt.Errorf("%s | %w", "Lte", err) + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "QueryName_", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.QueryName_ = &o + + case "relation": + if err := dec.Decode(&s.Relation); err != nil { + return fmt.Errorf("%s | %w", "Relation", err) + } + + case "to": + if err := dec.Decode(&s.To); err != nil { + return fmt.Errorf("%s | %w", "To", err) + } + + } + } + return nil +} + +// NewRangeQueryBaseDateMath returns a RangeQueryBaseDateMath. +func NewRangeQueryBaseDateMath() *RangeQueryBaseDateMath { + r := &RangeQueryBaseDateMath{} + + return r +} diff --git a/typedapi/types/rangequerybasedouble.go b/typedapi/types/rangequerybasedouble.go new file mode 100644 index 0000000000..df453a6c9c --- /dev/null +++ b/typedapi/types/rangequerybasedouble.go @@ -0,0 +1,191 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/rangerelation" +) + +// RangeQueryBasedouble type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L109-L133 +type RangeQueryBasedouble struct { + // Boost Floating point number used to decrease or increase the relevance scores of + // the query. + // Boost values are relative to the default value of 1.0. + // A boost value between 0 and 1.0 decreases the relevance score. + // A value greater than 1.0 increases the relevance score. + Boost *float32 `json:"boost,omitempty"` + From *Float64 `json:"from,omitempty"` + // Gt Greater than. + Gt *Float64 `json:"gt,omitempty"` + // Gte Greater than or equal to. + Gte *Float64 `json:"gte,omitempty"` + // Lt Less than. + Lt *Float64 `json:"lt,omitempty"` + // Lte Less than or equal to. + Lte *Float64 `json:"lte,omitempty"` + QueryName_ *string `json:"_name,omitempty"` + // Relation Indicates how the range query matches values for `range` fields. + Relation *rangerelation.RangeRelation `json:"relation,omitempty"` + To *Float64 `json:"to,omitempty"` +} + +func (s *RangeQueryBasedouble) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "Boost", err) + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "from": + if err := dec.Decode(&s.From); err != nil { + return fmt.Errorf("%s | %w", "From", err) + } + + case "gt": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return fmt.Errorf("%s | %w", "Gt", err) + } + f := Float64(value) + s.Gt = &f + case float64: + f := Float64(v) + s.Gt = &f + } + + case "gte": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return fmt.Errorf("%s | %w", "Gte", err) + } + f := Float64(value) + s.Gte = &f + case float64: + f := Float64(v) + s.Gte = &f + } + + case "lt": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return fmt.Errorf("%s | %w", "Lt", err) + } + f := Float64(value) + s.Lt = &f + case float64: + f := Float64(v) + s.Lt = &f + } + + case "lte": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 64) + if err != nil { + return fmt.Errorf("%s | %w", "Lte", err) + } + f := Float64(value) + s.Lte = &f + case float64: + f := Float64(v) + s.Lte = &f + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "QueryName_", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.QueryName_ = &o + + case "relation": + if err := dec.Decode(&s.Relation); err != nil { + return fmt.Errorf("%s | %w", "Relation", err) + } + + case "to": + if err := dec.Decode(&s.To); err != nil { + return fmt.Errorf("%s | %w", "To", err) + } + + } + } + return nil +} + +// NewRangeQueryBasedouble returns a RangeQueryBasedouble. 
+func NewRangeQueryBasedouble() *RangeQueryBasedouble { + r := &RangeQueryBasedouble{} + + return r +} diff --git a/typedapi/types/rangequerybasestring.go b/typedapi/types/rangequerybasestring.go new file mode 100644 index 0000000000..0040a50803 --- /dev/null +++ b/typedapi/types/rangequerybasestring.go @@ -0,0 +1,189 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/rangerelation" +) + +// RangeQueryBasestring type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L109-L133 +type RangeQueryBasestring struct { + // Boost Floating point number used to decrease or increase the relevance scores of + // the query. + // Boost values are relative to the default value of 1.0. + // A boost value between 0 and 1.0 decreases the relevance score. + // A value greater than 1.0 increases the relevance score. + Boost *float32 `json:"boost,omitempty"` + From *string `json:"from,omitempty"` + // Gt Greater than. + Gt *string `json:"gt,omitempty"` + // Gte Greater than or equal to. + Gte *string `json:"gte,omitempty"` + // Lt Less than. + Lt *string `json:"lt,omitempty"` + // Lte Less than or equal to. + Lte *string `json:"lte,omitempty"` + QueryName_ *string `json:"_name,omitempty"` + // Relation Indicates how the range query matches values for `range` fields. 
+ Relation *rangerelation.RangeRelation `json:"relation,omitempty"` + To *string `json:"to,omitempty"` +} + +func (s *RangeQueryBasestring) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "Boost", err) + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "from": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "From", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.From = &o + + case "gt": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Gt", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Gt = &o + + case "gte": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Gte", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Gte = &o + + case "lt": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Lt", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Lt = &o + + case "lte": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Lte", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Lte = &o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "QueryName_", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.QueryName_ = &o + + case "relation": + if err := dec.Decode(&s.Relation); err != nil { + return fmt.Errorf("%s | %w", "Relation", err) + } + + case "to": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "To", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.To = &o + + } + } + return nil +} + +// NewRangeQueryBasestring returns a RangeQueryBasestring. +func NewRangeQueryBasestring() *RangeQueryBasestring { + r := &RangeQueryBasestring{} + + return r +} diff --git a/typedapi/types/rankcontainer.go b/typedapi/types/rankcontainer.go index acd0673da1..1399a0f3bf 100644 --- a/typedapi/types/rankcontainer.go +++ b/typedapi/types/rankcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // RankContainer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Rank.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Rank.ts#L22-L28 type RankContainer struct { // Rrf The reciprocal rank fusion parameters Rrf *RrfRank `json:"rrf,omitempty"` diff --git a/typedapi/types/rankeddocument.go b/typedapi/types/rankeddocument.go new file mode 100644 index 0000000000..8b0edc7804 --- /dev/null +++ b/typedapi/types/rankeddocument.go @@ -0,0 +1,110 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// RankedDocument type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/_types/Results.ts#L67-L77 +type RankedDocument struct { + Index int `json:"index"` + Score float32 `json:"score"` + Text *string `json:"text,omitempty"` +} + +func (s *RankedDocument) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "index": + + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "Index", err) + } + s.Index = value + case float64: + f := int(v) + s.Index = f + } + + case "score": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "Score", err) + } + f := float32(value) + s.Score = f + case float64: + f := float32(v) + s.Score = f + } + + case "text": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Text", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Text = &o + + } + } + return nil +} + +// NewRankedDocument returns a RankedDocument. +func NewRankedDocument() *RankedDocument { + r := &RankedDocument{} + + return r +} diff --git a/typedapi/types/rankevalhit.go b/typedapi/types/rankevalhit.go index c6191c955a..d472e0d7c7 100644 --- a/typedapi/types/rankevalhit.go +++ b/typedapi/types/rankevalhit.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankEvalHit type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L141-L145 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L141-L145 type RankEvalHit struct { Id_ string `json:"_id"` Index_ string `json:"_index"` @@ -64,7 +64,7 @@ func (s *RankEvalHit) UnmarshalJSON(data []byte) error { } case "_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankevalhititem.go b/typedapi/types/rankevalhititem.go index 54df332adf..c7df679d7c 100644 --- a/typedapi/types/rankevalhititem.go +++ b/typedapi/types/rankevalhititem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,10 +30,10 @@ import ( // RankEvalHitItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L136-L139 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L136-L139 type RankEvalHitItem struct { Hit RankEvalHit `json:"hit"` - Rating Float64 `json:"rating,omitempty"` + Rating *Float64 `json:"rating,omitempty"` } func (s *RankEvalHitItem) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/rankevalmetric.go b/typedapi/types/rankevalmetric.go index b85005bf9f..8495d92592 100644 --- a/typedapi/types/rankevalmetric.go +++ b/typedapi/types/rankevalmetric.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // RankEvalMetric type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L90-L96 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L90-L96 type RankEvalMetric struct { Dcg *RankEvalMetricDiscountedCumulativeGain `json:"dcg,omitempty"` ExpectedReciprocalRank *RankEvalMetricExpectedReciprocalRank `json:"expected_reciprocal_rank,omitempty"` diff --git a/typedapi/types/rankevalmetricdetail.go b/typedapi/types/rankevalmetricdetail.go index 7bcc01b0ba..203166b80b 100644 --- a/typedapi/types/rankevalmetricdetail.go +++ b/typedapi/types/rankevalmetricdetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankEvalMetricDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L125-L134 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L125-L134 type RankEvalMetricDetail struct { // Hits The hits section shows a grouping of the search results with their supplied // ratings @@ -78,7 +78,7 @@ func (s *RankEvalMetricDetail) UnmarshalJSON(data []byte) error { } case "metric_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankevalmetricdiscountedcumulativegain.go b/typedapi/types/rankevalmetricdiscountedcumulativegain.go index c14e03fa8f..785749da9b 100644 --- a/typedapi/types/rankevalmetricdiscountedcumulativegain.go +++ b/typedapi/types/rankevalmetricdiscountedcumulativegain.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankEvalMetricDiscountedCumulativeGain type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L66-L77 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L66-L77 type RankEvalMetricDiscountedCumulativeGain struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. @@ -57,7 +57,7 @@ func (s *RankEvalMetricDiscountedCumulativeGain) UnmarshalJSON(data []byte) erro case "k": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *RankEvalMetricDiscountedCumulativeGain) UnmarshalJSON(data []byte) erro } case "normalize": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankevalmetricexpectedreciprocalrank.go b/typedapi/types/rankevalmetricexpectedreciprocalrank.go index cbe07cbfdf..6a234d44c7 100644 --- a/typedapi/types/rankevalmetricexpectedreciprocalrank.go +++ b/typedapi/types/rankevalmetricexpectedreciprocalrank.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankEvalMetricExpectedReciprocalRank type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L79-L88 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L79-L88 type RankEvalMetricExpectedReciprocalRank struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. @@ -57,7 +57,7 @@ func (s *RankEvalMetricExpectedReciprocalRank) UnmarshalJSON(data []byte) error case "k": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -73,7 +73,7 @@ func (s *RankEvalMetricExpectedReciprocalRank) UnmarshalJSON(data []byte) error case "maximum_relevance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankevalmetricmeanreciprocalrank.go b/typedapi/types/rankevalmetricmeanreciprocalrank.go index 2c5308405d..ef2e6c3a02 100644 --- a/typedapi/types/rankevalmetricmeanreciprocalrank.go +++ b/typedapi/types/rankevalmetricmeanreciprocalrank.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankEvalMetricMeanReciprocalRank type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L60-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L60-L64 type RankEvalMetricMeanReciprocalRank struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. @@ -58,7 +58,7 @@ func (s *RankEvalMetricMeanReciprocalRank) UnmarshalJSON(data []byte) error { case "k": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -74,7 +74,7 @@ func (s *RankEvalMetricMeanReciprocalRank) UnmarshalJSON(data []byte) error { case "relevant_rating_threshold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankevalmetricprecision.go b/typedapi/types/rankevalmetricprecision.go index b7a6a6ad23..1a1a5373e1 100644 --- a/typedapi/types/rankevalmetricprecision.go +++ b/typedapi/types/rankevalmetricprecision.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankEvalMetricPrecision type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L42-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L42-L52 type RankEvalMetricPrecision struct { // IgnoreUnlabeled Controls how unlabeled documents in the search results are counted. 
If set to // true, unlabeled documents are ignored and neither count as relevant or @@ -61,7 +61,7 @@ func (s *RankEvalMetricPrecision) UnmarshalJSON(data []byte) error { switch t { case "ignore_unlabeled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *RankEvalMetricPrecision) UnmarshalJSON(data []byte) error { case "k": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *RankEvalMetricPrecision) UnmarshalJSON(data []byte) error { case "relevant_rating_threshold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankevalmetricratingtreshold.go b/typedapi/types/rankevalmetricratingtreshold.go index 059344e95d..dc48b7da96 100644 --- a/typedapi/types/rankevalmetricratingtreshold.go +++ b/typedapi/types/rankevalmetricratingtreshold.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankEvalMetricRatingTreshold type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L34-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L34-L40 type RankEvalMetricRatingTreshold struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. @@ -58,7 +58,7 @@ func (s *RankEvalMetricRatingTreshold) UnmarshalJSON(data []byte) error { case "k": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -74,7 +74,7 @@ func (s *RankEvalMetricRatingTreshold) UnmarshalJSON(data []byte) error { case "relevant_rating_threshold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankevalmetricrecall.go b/typedapi/types/rankevalmetricrecall.go index 2305b2a105..ec8ea83423 100644 --- a/typedapi/types/rankevalmetricrecall.go +++ b/typedapi/types/rankevalmetricrecall.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankEvalMetricRecall type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L54-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L54-L58 type RankEvalMetricRecall struct { // K Sets the maximum number of documents retrieved per query. This value will act // in place of the usual size parameter in the query. 
@@ -58,7 +58,7 @@ func (s *RankEvalMetricRecall) UnmarshalJSON(data []byte) error { case "k": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -74,7 +74,7 @@ func (s *RankEvalMetricRecall) UnmarshalJSON(data []byte) error { case "relevant_rating_threshold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankevalquery.go b/typedapi/types/rankevalquery.go index aa2314eee1..283df87ce8 100644 --- a/typedapi/types/rankevalquery.go +++ b/typedapi/types/rankevalquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankEvalQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L111-L114 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L111-L114 type RankEvalQuery struct { Query Query `json:"query"` Size *int `json:"size,omitempty"` @@ -59,7 +59,7 @@ func (s *RankEvalQuery) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankevalrequestitem.go b/typedapi/types/rankevalrequestitem.go index c465e82954..b95b0b4da9 100644 --- a/typedapi/types/rankevalrequestitem.go +++ b/typedapi/types/rankevalrequestitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RankEvalRequestItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L98-L109 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L98-L109 type RankEvalRequestItem struct { // Id The search request’s ID, used to group result details later. Id string `json:"id"` diff --git a/typedapi/types/rankfeaturefunction.go b/typedapi/types/rankfeaturefunction.go index 0d0e61032b..2c573b504d 100644 --- a/typedapi/types/rankfeaturefunction.go +++ b/typedapi/types/rankfeaturefunction.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // RankFeatureFunction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L264-L264 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L273-L273 type RankFeatureFunction struct { } diff --git a/typedapi/types/rankfeaturefunctionlinear.go b/typedapi/types/rankfeaturefunctionlinear.go index 69dd4fc0c0..1898e977a7 100644 --- a/typedapi/types/rankfeaturefunctionlinear.go +++ b/typedapi/types/rankfeaturefunctionlinear.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // RankFeatureFunctionLinear type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L266-L266 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L275-L275 type RankFeatureFunctionLinear struct { } diff --git a/typedapi/types/rankfeaturefunctionlogarithm.go b/typedapi/types/rankfeaturefunctionlogarithm.go index 53ca20269d..324ff2f36a 100644 --- a/typedapi/types/rankfeaturefunctionlogarithm.go +++ b/typedapi/types/rankfeaturefunctionlogarithm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankFeatureFunctionLogarithm type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L268-L273 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L277-L282 type RankFeatureFunctionLogarithm struct { // ScalingFactor Configurable scaling factor. ScalingFactor float32 `json:"scaling_factor"` @@ -53,7 +53,7 @@ func (s *RankFeatureFunctionLogarithm) UnmarshalJSON(data []byte) error { switch t { case "scaling_factor": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankfeaturefunctionsaturation.go b/typedapi/types/rankfeaturefunctionsaturation.go index 6b13b2e972..c4e12db520 100644 --- a/typedapi/types/rankfeaturefunctionsaturation.go +++ b/typedapi/types/rankfeaturefunctionsaturation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankFeatureFunctionSaturation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L275-L280 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L284-L289 type RankFeatureFunctionSaturation struct { // Pivot Configurable pivot value so that the result will be less than 0.5. Pivot *float32 `json:"pivot,omitempty"` @@ -53,7 +53,7 @@ func (s *RankFeatureFunctionSaturation) UnmarshalJSON(data []byte) error { switch t { case "pivot": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankfeaturefunctionsigmoid.go b/typedapi/types/rankfeaturefunctionsigmoid.go index 03af94a02a..e311e3ba2b 100644 --- a/typedapi/types/rankfeaturefunctionsigmoid.go +++ b/typedapi/types/rankfeaturefunctionsigmoid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankFeatureFunctionSigmoid type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L282-L291 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L291-L300 type RankFeatureFunctionSigmoid struct { // Exponent Configurable Exponent. Exponent float32 `json:"exponent"` @@ -55,7 +55,7 @@ func (s *RankFeatureFunctionSigmoid) UnmarshalJSON(data []byte) error { switch t { case "exponent": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *RankFeatureFunctionSigmoid) UnmarshalJSON(data []byte) error { } case "pivot": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankfeatureproperty.go b/typedapi/types/rankfeatureproperty.go index 4127f9bb6b..43eeeaa071 100644 --- a/typedapi/types/rankfeatureproperty.go +++ b/typedapi/types/rankfeatureproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // RankFeatureProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L184-L187 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L192-L195 type RankFeatureProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -72,7 +72,7 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -93,7 +93,7 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -183,12 +183,6 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -207,6 +201,18 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -363,6 +369,12 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -374,7 +386,7 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -397,7 +409,7 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { } case "positive_score_impact": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -417,7 +429,7 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -438,7 +450,7 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -528,12 +540,6 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := 
NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -552,6 +558,18 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -708,6 +726,12 @@ func (s *RankFeatureProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { diff --git a/typedapi/types/rankfeaturequery.go b/typedapi/types/rankfeaturequery.go index ea70c7decf..0aeee27753 100644 --- a/typedapi/types/rankfeaturequery.go +++ b/typedapi/types/rankfeaturequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RankFeatureQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L293-L316 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L302-L325 type RankFeatureQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -72,7 +72,7 @@ func (s *RankFeatureQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rankfeaturesproperty.go b/typedapi/types/rankfeaturesproperty.go index eb9b918844..b04c85ce03 100644 --- a/typedapi/types/rankfeaturesproperty.go +++ b/typedapi/types/rankfeaturesproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // RankFeaturesProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L189-L192 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L197-L200 type RankFeaturesProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -72,7 +72,7 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -93,7 +93,7 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -183,12 +183,6 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -207,6 +201,18 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -363,6 +369,12 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -374,7 +386,7 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -397,7 +409,7 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { } case "positive_score_impact": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -417,7 +429,7 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -438,7 +450,7 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -528,12 +540,6 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := 
NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -552,6 +558,18 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -708,6 +726,12 @@ func (s *RankFeaturesProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { diff --git a/typedapi/types/raretermsaggregation.go b/typedapi/types/raretermsaggregation.go index 4959bef1e9..80988f1109 100644 --- a/typedapi/types/raretermsaggregation.go +++ b/typedapi/types/raretermsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RareTermsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L687-L717 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L689-L719 type RareTermsAggregation struct { // Exclude Terms that should be excluded from the aggregation. Exclude []string `json:"exclude,omitempty"` @@ -40,12 +40,10 @@ type RareTermsAggregation struct { // Include Terms that should be included in the aggregation. Include TermsInclude `json:"include,omitempty"` // MaxDocCount The maximum number of documents a term should appear in. - MaxDocCount *int64 `json:"max_doc_count,omitempty"` - Meta Metadata `json:"meta,omitempty"` + MaxDocCount *int64 `json:"max_doc_count,omitempty"` // Missing The value to apply to documents that do not have a value. // By default, documents without a value are ignored. Missing Missing `json:"missing,omitempty"` - Name *string `json:"name,omitempty"` // Precision The precision of the internal CuckooFilters. // Smaller precision leads to better approximation, but higher memory usage. 
Precision *Float64 `json:"precision,omitempty"` @@ -94,7 +92,7 @@ func (s *RareTermsAggregation) UnmarshalJSON(data []byte) error { } case "max_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,30 +106,13 @@ func (s *RareTermsAggregation) UnmarshalJSON(data []byte) error { s.MaxDocCount = &f } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "missing": if err := dec.Decode(&s.Missing); err != nil { return fmt.Errorf("%s | %w", "Missing", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "precision": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rateaggregate.go b/typedapi/types/rateaggregate.go index 0277248b34..94df434e0b 100644 --- a/typedapi/types/rateaggregate.go +++ b/typedapi/types/rateaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RateAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L741-L745 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L748-L752 type RateAggregate struct { Meta Metadata `json:"meta,omitempty"` Value Float64 `json:"value"` @@ -59,7 +59,7 @@ func (s *RateAggregate) UnmarshalJSON(data []byte) error { } case "value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rateaggregation.go b/typedapi/types/rateaggregation.go index 0877ece6bd..499804cd5c 100644 --- a/typedapi/types/rateaggregation.go +++ b/typedapi/types/rateaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // RateAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L230-L241 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L230-L241 type RateAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` diff --git a/typedapi/types/readexception.go b/typedapi/types/readexception.go index 7d3d55d04a..81be807d7f 100644 --- a/typedapi/types/readexception.go +++ b/typedapi/types/readexception.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ReadException type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ccr/_types/FollowIndexStats.ts#L71-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ccr/_types/FollowIndexStats.ts#L71-L75 type ReadException struct { Exception ErrorCause `json:"exception"` FromSeqNo int64 `json:"from_seq_no"` @@ -65,7 +65,7 @@ func (s *ReadException) UnmarshalJSON(data []byte) error { case "retries": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/readonlyurlrepository.go b/typedapi/types/readonlyurlrepository.go index e3f02f6f3b..ef4a2e081e 100644 --- a/typedapi/types/readonlyurlrepository.go +++ b/typedapi/types/readonlyurlrepository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ReadOnlyUrlRepository type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L60-L63 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L60-L63 type ReadOnlyUrlRepository struct { Settings ReadOnlyUrlRepositorySettings `json:"settings"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/readonlyurlrepositorysettings.go b/typedapi/types/readonlyurlrepositorysettings.go index dc2c548143..433d756158 100644 --- a/typedapi/types/readonlyurlrepositorysettings.go +++ b/typedapi/types/readonlyurlrepositorysettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ReadOnlyUrlRepositorySettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L110-L115 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L110-L115 type ReadOnlyUrlRepositorySettings struct { ChunkSize ByteSize `json:"chunk_size,omitempty"` Compress *bool `json:"compress,omitempty"` @@ -64,7 +64,7 @@ func (s *ReadOnlyUrlRepositorySettings) UnmarshalJSON(data []byte) error { } case "compress": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -79,7 +79,7 @@ func (s *ReadOnlyUrlRepositorySettings) UnmarshalJSON(data []byte) error { case "http_max_retries": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *ReadOnlyUrlRepositorySettings) UnmarshalJSON(data []byte) error { case "max_number_of_snapshots": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/realmcache.go b/typedapi/types/realmcache.go index 6383b03339..572c587958 100644 --- a/typedapi/types/realmcache.go +++ b/typedapi/types/realmcache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RealmCache type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L266-L268 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L266-L268 type RealmCache struct { Size int64 `json:"size"` } @@ -52,7 +52,7 @@ func (s *RealmCache) UnmarshalJSON(data []byte) error { switch t { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/realminfo.go b/typedapi/types/realminfo.go index 6c8d93731a..e59286deee 100644 --- a/typedapi/types/realminfo.go +++ b/typedapi/types/realminfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RealmInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/RealmInfo.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/RealmInfo.ts#L22-L25 type RealmInfo struct { Name string `json:"name"` Type string `json:"type"` diff --git a/typedapi/types/recording.go b/typedapi/types/recording.go index 7e5d1dd4aa..3f41ef0d4b 100644 --- a/typedapi/types/recording.go +++ b/typedapi/types/recording.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Recording type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L225-L230 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L225-L230 type Recording struct { CumulativeExecutionCount *int64 `json:"cumulative_execution_count,omitempty"` CumulativeExecutionTime Duration `json:"cumulative_execution_time,omitempty"` @@ -55,7 +55,7 @@ func (s *Recording) UnmarshalJSON(data []byte) error { switch t { case "cumulative_execution_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/recoverybytes.go b/typedapi/types/recoverybytes.go index 56164d9e95..c84ff79442 100644 --- a/typedapi/types/recoverybytes.go +++ b/typedapi/types/recoverybytes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RecoveryBytes type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/recovery/types.ts#L38-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/recovery/types.ts#L38-L48 type RecoveryBytes struct { Percent Percentage `json:"percent"` Recovered ByteSize `json:"recovered,omitempty"` diff --git a/typedapi/types/recoveryfiles.go b/typedapi/types/recoveryfiles.go index 62f4a12850..37f7c099fb 100644 --- a/typedapi/types/recoveryfiles.go +++ b/typedapi/types/recoveryfiles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RecoveryFiles type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/recovery/types.ts#L56-L62 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/recovery/types.ts#L56-L62 type RecoveryFiles struct { Details []FileDetails `json:"details,omitempty"` Percent Percentage `json:"percent"` @@ -66,7 +66,7 @@ func (s *RecoveryFiles) UnmarshalJSON(data []byte) error { } case "recovered": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -81,7 +81,7 @@ func (s *RecoveryFiles) UnmarshalJSON(data []byte) error { } case "reused": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *RecoveryFiles) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/recoveryindexstatus.go b/typedapi/types/recoveryindexstatus.go index a0be533b78..cc92c16307 100644 --- a/typedapi/types/recoveryindexstatus.go +++ b/typedapi/types/recoveryindexstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RecoveryIndexStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/recovery/types.ts#L64-L74 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/recovery/types.ts#L64-L74 type RecoveryIndexStatus struct { Bytes *RecoveryBytes `json:"bytes,omitempty"` Files RecoveryFiles `json:"files"` diff --git a/typedapi/types/recoveryorigin.go b/typedapi/types/recoveryorigin.go index a1957b1f8d..e0ce3375bf 100644 --- a/typedapi/types/recoveryorigin.go +++ b/typedapi/types/recoveryorigin.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RecoveryOrigin type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/recovery/types.ts#L76-L89 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/recovery/types.ts#L76-L89 type RecoveryOrigin struct { BootstrapNewHistoryUuid *bool `json:"bootstrap_new_history_uuid,omitempty"` Host *string `json:"host,omitempty"` @@ -63,7 +63,7 @@ func (s *RecoveryOrigin) UnmarshalJSON(data []byte) error { switch t { case "bootstrap_new_history_uuid": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/recoveryrecord.go b/typedapi/types/recoveryrecord.go index a77304e3bc..edf0454049 100644 --- a/typedapi/types/recoveryrecord.go +++ b/typedapi/types/recoveryrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RecoveryRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/recovery/types.ts#L24-L155 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/recovery/types.ts#L24-L155 type RecoveryRecord struct { // Bytes The number of bytes to recover. Bytes *string `json:"bytes,omitempty"` diff --git a/typedapi/types/recoverystartstatus.go b/typedapi/types/recoverystartstatus.go index 7daa93a883..4d2ddcc2c2 100644 --- a/typedapi/types/recoverystartstatus.go +++ b/typedapi/types/recoverystartstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RecoveryStartStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/recovery/types.ts#L91-L96 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/recovery/types.ts#L91-L96 type RecoveryStartStatus struct { CheckIndexTime Duration `json:"check_index_time,omitempty"` CheckIndexTimeInMillis int64 `json:"check_index_time_in_millis"` diff --git a/typedapi/types/recoverystats.go b/typedapi/types/recoverystats.go index 433010241d..0cc6717fc8 100644 --- a/typedapi/types/recoverystats.go +++ b/typedapi/types/recoverystats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RecoveryStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L228-L233 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L228-L233 type RecoveryStats struct { CurrentAsSource int64 `json:"current_as_source"` CurrentAsTarget int64 `json:"current_as_target"` @@ -55,7 +55,7 @@ func (s *RecoveryStats) UnmarshalJSON(data []byte) error { switch t { case "current_as_source": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *RecoveryStats) UnmarshalJSON(data []byte) error { } case "current_as_target": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/recoverystatus.go b/typedapi/types/recoverystatus.go index b08c677aa8..f76964bf7a 100644 --- a/typedapi/types/recoverystatus.go +++ b/typedapi/types/recoverystatus.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // RecoveryStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/recovery/types.ts#L98-L100 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/recovery/types.ts#L98-L100 type RecoveryStatus struct { Shards []ShardRecovery `json:"shards"` } diff --git a/typedapi/types/refreshstats.go b/typedapi/types/refreshstats.go index 23de4f253a..187b74a9a8 100644 --- a/typedapi/types/refreshstats.go +++ b/typedapi/types/refreshstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RefreshStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L235-L242 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L235-L242 type RefreshStats struct { ExternalTotal int64 `json:"external_total"` ExternalTotalTimeInMillis int64 `json:"external_total_time_in_millis"` @@ -57,7 +57,7 @@ func (s *RefreshStats) UnmarshalJSON(data []byte) error { switch t { case "external_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -77,7 +77,7 @@ func (s *RefreshStats) UnmarshalJSON(data []byte) error { } case "listeners": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *RefreshStats) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/regexoptions.go b/typedapi/types/regexoptions.go index 4996713c37..8951f480dc 100644 --- a/typedapi/types/regexoptions.go +++ b/typedapi/types/regexoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RegexOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L180-L191 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L183-L194 type RegexOptions struct { // Flags Optional operators for the regular expression. 
Flags string `json:"flags,omitempty"` @@ -68,7 +68,7 @@ func (s *RegexOptions) UnmarshalJSON(data []byte) error { case "max_determinized_states": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/regexpquery.go b/typedapi/types/regexpquery.go index c4979f5d61..67991f2a2f 100644 --- a/typedapi/types/regexpquery.go +++ b/typedapi/types/regexpquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RegexpQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L185-L215 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L187-L217 type RegexpQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -80,7 +80,7 @@ func (s *RegexpQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *RegexpQuery) UnmarshalJSON(data []byte) error { } case "case_insensitive": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -123,7 +123,7 @@ func (s *RegexpQuery) UnmarshalJSON(data []byte) error { case "max_determinized_states": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/regressioninferenceoptions.go b/typedapi/types/regressioninferenceoptions.go index 630e61c833..bfaac9c425 100644 --- a/typedapi/types/regressioninferenceoptions.go +++ b/typedapi/types/regressioninferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RegressionInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L82-L91 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L82-L91 type RegressionInferenceOptions struct { // NumTopFeatureImportanceValues Specifies the maximum number of feature importance values per document. NumTopFeatureImportanceValues *int `json:"num_top_feature_importance_values,omitempty"` @@ -57,7 +57,7 @@ func (s *RegressionInferenceOptions) UnmarshalJSON(data []byte) error { case "num_top_feature_importance_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/reindexdestination.go b/typedapi/types/reindexdestination.go index 8994a6d00a..e158b09e65 100644 --- a/typedapi/types/reindexdestination.go +++ b/typedapi/types/reindexdestination.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // ReindexDestination type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/reindex/types.ts#L39-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/reindex/types.ts#L39-L64 type ReindexDestination struct { // Index The name of the data stream, index, or index alias you are copying to. Index string `json:"index"` diff --git a/typedapi/types/reindexnode.go b/typedapi/types/reindexnode.go index ae77cf991b..3114b0007e 100644 --- a/typedapi/types/reindexnode.go +++ b/typedapi/types/reindexnode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // ReindexNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/reindex_rethrottle/types.ts#L33-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/reindex_rethrottle/types.ts#L33-L35 type ReindexNode struct { Attributes map[string]string `json:"attributes"` Host string `json:"host"` diff --git a/typedapi/types/reindexsource.go b/typedapi/types/reindexsource.go index fdcb56831a..ea1fa0c7f7 100644 --- a/typedapi/types/reindexsource.go +++ b/typedapi/types/reindexsource.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ReindexSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/reindex/types.ts#L66-L97 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/reindex/types.ts#L66-L97 type ReindexSource struct { // Index The name of the data stream, index, or alias you are copying from. // Accepts a comma-separated list to reindex from multiple sources. @@ -102,7 +102,7 @@ func (s *ReindexSource) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/reindexstatus.go b/typedapi/types/reindexstatus.go index 1a4c26d53e..f9490ec3c3 100644 --- a/typedapi/types/reindexstatus.go +++ b/typedapi/types/reindexstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ReindexStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/reindex_rethrottle/types.ts#L37-L85 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/reindex_rethrottle/types.ts#L37-L85 type ReindexStatus struct { // Batches The number of scroll responses pulled back by the reindex. Batches int64 `json:"batches"` @@ -81,7 +81,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { switch t { case "batches": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { } case "created": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,7 +111,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { } case "deleted": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -126,7 +126,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { } case "noops": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -141,7 +141,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { } case "requests_per_second": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -182,7 +182,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -197,7 +197,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { } case "updated": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -212,7 +212,7 @@ func (s *ReindexStatus) UnmarshalJSON(data []byte) error { } case "version_conflicts": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/reindextask.go b/typedapi/types/reindextask.go index 90f11c3a49..2931998414 100644 --- a/typedapi/types/reindextask.go +++ b/typedapi/types/reindextask.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ReindexTask type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/reindex_rethrottle/types.ts#L87-L98 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/reindex_rethrottle/types.ts#L87-L98 type ReindexTask struct { Action string `json:"action"` Cancellable bool `json:"cancellable"` @@ -73,7 +73,7 @@ func (s *ReindexTask) UnmarshalJSON(data []byte) error { s.Action = o case "cancellable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *ReindexTask) UnmarshalJSON(data []byte) error { } case "id": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/reloaddetails.go b/typedapi/types/reloaddetails.go index ab93206bd4..44abe1d4e2 100644 --- a/typedapi/types/reloaddetails.go +++ b/typedapi/types/reloaddetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ReloadDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/reload_search_analyzers/types.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/reload_search_analyzers/types.ts#L27-L31 type ReloadDetails struct { Index string `json:"index"` ReloadedAnalyzers []string `json:"reloaded_analyzers"` diff --git a/typedapi/types/reloadresult.go b/typedapi/types/reloadresult.go index 63acbdf5de..7c1de4631f 100644 --- a/typedapi/types/reloadresult.go +++ b/typedapi/types/reloadresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ReloadResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/reload_search_analyzers/types.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/reload_search_analyzers/types.ts#L22-L25 type ReloadResult struct { ReloadDetails []ReloadDetails `json:"reload_details"` Shards_ ShardStatistics `json:"_shards"` diff --git a/typedapi/types/relocationfailureinfo.go b/typedapi/types/relocationfailureinfo.go index c3f3811b84..4f754e0c79 100644 --- a/typedapi/types/relocationfailureinfo.go +++ b/typedapi/types/relocationfailureinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RelocationFailureInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Node.ts#L73-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Node.ts#L73-L75 type RelocationFailureInfo struct { FailedAttempts int `json:"failed_attempts"` } @@ -53,7 +53,7 @@ func (s *RelocationFailureInfo) UnmarshalJSON(data []byte) error { case "failed_attempts": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/remotesource.go b/typedapi/types/remotesource.go index e7ea68df2d..0b96e75b37 100644 --- a/typedapi/types/remotesource.go +++ b/typedapi/types/remotesource.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RemoteSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/reindex/types.ts#L99-L125 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/reindex/types.ts#L99-L125 type RemoteSource struct { // ConnectTimeout The remote connection timeout. // Defaults to 30 seconds. diff --git a/typedapi/types/removeaction.go b/typedapi/types/removeaction.go index 49e8e022ce..3ff6baf412 100644 --- a/typedapi/types/removeaction.go +++ b/typedapi/types/removeaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RemoveAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/update_aliases/types.ts#L97-L122 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/update_aliases/types.ts#L97-L122 type RemoveAction struct { // Alias Alias for the action. // Index alias names support date math. @@ -107,7 +107,7 @@ func (s *RemoveAction) UnmarshalJSON(data []byte) error { } case "must_exist": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/removeduplicatestokenfilter.go b/typedapi/types/removeduplicatestokenfilter.go index ac949c7f70..57c99099f7 100644 --- a/typedapi/types/removeduplicatestokenfilter.go +++ b/typedapi/types/removeduplicatestokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RemoveDuplicatesTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L301-L303 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L303-L305 type RemoveDuplicatesTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` diff --git a/typedapi/types/removeindexaction.go b/typedapi/types/removeindexaction.go index 2307cf90cf..120721629e 100644 --- a/typedapi/types/removeindexaction.go +++ b/typedapi/types/removeindexaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RemoveIndexAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/update_aliases/types.ts#L124-L139 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/update_aliases/types.ts#L124-L139 type RemoveIndexAction struct { // Index Data stream or index for the action. // Supports wildcards (`*`). @@ -80,7 +80,7 @@ func (s *RemoveIndexAction) UnmarshalJSON(data []byte) error { } case "must_exist": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/removeprocessor.go b/typedapi/types/removeprocessor.go index 9b62714823..cc135be13e 100644 --- a/typedapi/types/removeprocessor.go +++ b/typedapi/types/removeprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RemoveProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L941-L951 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L941-L955 type RemoveProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -45,6 +45,9 @@ type RemoveProcessor struct { // IgnoreMissing If `true` and `field` does not exist or is `null`, the processor quietly // exits without modifying the document. IgnoreMissing *bool `json:"ignore_missing,omitempty"` + // Keep Fields to be kept. When set, all fields other than those specified are + // removed. + Keep []string `json:"keep,omitempty"` // OnFailure Handle failures for the processor. OnFailure []ProcessorContainer `json:"on_failure,omitempty"` // Tag Identifier for the processor. 
@@ -108,7 +111,7 @@ func (s *RemoveProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -122,7 +125,7 @@ func (s *RemoveProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -135,6 +138,22 @@ func (s *RemoveProcessor) UnmarshalJSON(data []byte) error { s.IgnoreMissing = &v } + case "keep": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(string) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Keep", err) + } + + s.Keep = append(s.Keep, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Keep); err != nil { + return fmt.Errorf("%s | %w", "Keep", err) + } + } + case "on_failure": if err := dec.Decode(&s.OnFailure); err != nil { return fmt.Errorf("%s | %w", "OnFailure", err) diff --git a/typedapi/types/renameprocessor.go b/typedapi/types/renameprocessor.go index d2796941b5..6fa7185c02 100644 --- a/typedapi/types/renameprocessor.go +++ b/typedapi/types/renameprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RenameProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L953-L969 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L957-L973 type RenameProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -101,7 +101,7 @@ func (s *RenameProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *RenameProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/reportingemailattachment.go b/typedapi/types/reportingemailattachment.go index 02bb03cc58..6617e92068 100644 --- a/typedapi/types/reportingemailattachment.go +++ b/typedapi/types/reportingemailattachment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ReportingEmailAttachment type. 
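The `remove` ingest processor gains a `keep` option in this spec update ("Fields to be kept. When set, all fields other than those specified are removed."), and the generated decoder above accepts either a bare string or an array for it. A hedged sketch of exercising that field through the generated type; the import path is assumed to be the standard v8 typed-API location:

```go
package main

import (
	"encoding/json"
	"fmt"

	// Import path assumed to be the standard v8 typed-API location.
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// The generated decoder accepts either a bare string or an array for
	// "keep" and normalizes both into the Keep []string field.
	for _, doc := range []string{
		`{"keep": "user"}`,
		`{"keep": ["user", "email"]}`,
	} {
		var p types.RemoveProcessor
		if err := json.Unmarshal([]byte(doc), &p); err != nil {
			panic(err)
		}
		fmt.Println(p.Keep)
	}
}
```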
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L224-L232 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L224-L232 type ReportingEmailAttachment struct { Inline *bool `json:"inline,omitempty"` Interval Duration `json:"interval,omitempty"` @@ -56,7 +56,7 @@ func (s *ReportingEmailAttachment) UnmarshalJSON(data []byte) error { switch t { case "inline": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -81,7 +81,7 @@ func (s *ReportingEmailAttachment) UnmarshalJSON(data []byte) error { case "retries": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/repositoriesrecord.go b/typedapi/types/repositoriesrecord.go index 04fced144e..594f7666e9 100644 --- a/typedapi/types/repositoriesrecord.go +++ b/typedapi/types/repositoriesrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RepositoriesRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/repositories/types.ts#L20-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/repositories/types.ts#L20-L31 type RepositoriesRecord struct { // Id The unique repository identifier. Id *string `json:"id,omitempty"` diff --git a/typedapi/types/repository.go b/typedapi/types/repository.go index 83a45968d4..6a01a27114 100644 --- a/typedapi/types/repository.go +++ b/typedapi/types/repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -29,5 +29,5 @@ package types // ReadOnlyUrlRepository // SourceOnlyRepository // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L24-L34 -type Repository interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L24-L34 +type Repository any diff --git a/typedapi/types/repositoryintegrityindicator.go b/typedapi/types/repositoryintegrityindicator.go index 75e136ead4..e69cacf817 100644 --- a/typedapi/types/repositoryintegrityindicator.go +++ b/typedapi/types/repositoryintegrityindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // RepositoryIntegrityIndicator type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L134-L138 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L134-L138 type RepositoryIntegrityIndicator struct { Details *RepositoryIntegrityIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` diff --git a/typedapi/types/repositoryintegrityindicatordetails.go b/typedapi/types/repositoryintegrityindicatordetails.go index 651c01ec67..bf81fe7fc2 100644 --- a/typedapi/types/repositoryintegrityindicatordetails.go +++ b/typedapi/types/repositoryintegrityindicatordetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RepositoryIntegrityIndicatorDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L139-L143 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L139-L143 type RepositoryIntegrityIndicatorDetails struct { Corrupted []string `json:"corrupted,omitempty"` CorruptedRepositories *int64 `json:"corrupted_repositories,omitempty"` @@ -59,7 +59,7 @@ func (s *RepositoryIntegrityIndicatorDetails) UnmarshalJSON(data []byte) error { } case "corrupted_repositories": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -74,7 +74,7 @@ func (s *RepositoryIntegrityIndicatorDetails) UnmarshalJSON(data []byte) error { } case "total_repositories": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/repositorylocation.go b/typedapi/types/repositorylocation.go index 4b99c2ef02..7b50400f4d 100644 --- a/typedapi/types/repositorylocation.go +++ b/typedapi/types/repositorylocation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RepositoryLocation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/RepositoryMeteringInformation.ts#L68-L74 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/RepositoryMeteringInformation.ts#L68-L74 type RepositoryLocation struct { BasePath string `json:"base_path"` // Bucket Bucket name (GCP, S3) diff --git a/typedapi/types/repositorymeteringinformation.go b/typedapi/types/repositorymeteringinformation.go index 7163a1dc4c..7f414b874c 100644 --- a/typedapi/types/repositorymeteringinformation.go +++ b/typedapi/types/repositorymeteringinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RepositoryMeteringInformation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/RepositoryMeteringInformation.ts#L24-L66 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/RepositoryMeteringInformation.ts#L24-L66 type RepositoryMeteringInformation struct { // Archived A flag that tells whether or not this object has been archived. When a // repository is closed or updated the @@ -83,7 +83,7 @@ func (s *RepositoryMeteringInformation) UnmarshalJSON(data []byte) error { switch t { case "archived": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/requestcachestats.go b/typedapi/types/requestcachestats.go index fff22534a0..63661f3713 100644 --- a/typedapi/types/requestcachestats.go +++ b/typedapi/types/requestcachestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RequestCacheStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L244-L250 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L244-L250 type RequestCacheStats struct { Evictions int64 `json:"evictions"` HitCount int64 `json:"hit_count"` @@ -56,7 +56,7 @@ func (s *RequestCacheStats) UnmarshalJSON(data []byte) error { switch t { case "evictions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *RequestCacheStats) UnmarshalJSON(data []byte) error { } case "hit_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -98,7 +98,7 @@ func (s *RequestCacheStats) UnmarshalJSON(data []byte) error { s.MemorySize = &o case "memory_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *RequestCacheStats) UnmarshalJSON(data []byte) error { } case "miss_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/requestcounts.go b/typedapi/types/requestcounts.go index 1add31568a..8d588802c2 100644 --- a/typedapi/types/requestcounts.go +++ b/typedapi/types/requestcounts.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RequestCounts type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/RepositoryMeteringInformation.ts#L76-L103 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/RepositoryMeteringInformation.ts#L76-L103 type RequestCounts struct { // GetBlob Number of Get Blob requests (Azure) GetBlob *int64 `json:"GetBlob,omitempty"` @@ -78,7 +78,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { switch t { case "GetBlob": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -93,7 +93,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { } case "GetBlobProperties": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +108,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { } case "GetObject": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -123,7 +123,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { } case "InsertObject": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { } case "ListBlobs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -153,7 +153,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { } case "ListObjects": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -168,7 +168,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { } case "PutBlob": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -183,7 +183,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { } case "PutBlock": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -198,7 +198,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { } case "PutBlockList": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -213,7 +213,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { } case "PutMultipartObject": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -228,7 +228,7 @@ func (s *RequestCounts) UnmarshalJSON(data []byte) error { } case "PutObject": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/requestitem.go b/typedapi/types/requestitem.go index 3fe8f33968..ccccd9a042 100644 --- a/typedapi/types/requestitem.go +++ b/typedapi/types/requestitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // MultisearchHeader // TemplateConfig // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/msearch_template/types.ts#L25-L26 -type RequestItem interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/msearch_template/types.ts#L25-L26 +type RequestItem any diff --git a/typedapi/types/reroutedecision.go b/typedapi/types/reroutedecision.go index 7fe2c3dfa0..d2a77d58a4 100644 --- a/typedapi/types/reroutedecision.go +++ b/typedapi/types/reroutedecision.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RerouteDecision type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/reroute/types.ts#L86-L90 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/reroute/types.ts#L86-L90 type RerouteDecision struct { Decider string `json:"decider"` Decision string `json:"decision"` diff --git a/typedapi/types/rerouteexplanation.go b/typedapi/types/rerouteexplanation.go index 4f9c350e89..f0ea2ef784 100644 --- a/typedapi/types/rerouteexplanation.go +++ b/typedapi/types/rerouteexplanation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RerouteExplanation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/reroute/types.ts#L92-L96 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/reroute/types.ts#L92-L96 type RerouteExplanation struct { Command string `json:"command"` Decisions []RerouteDecision `json:"decisions"` diff --git a/typedapi/types/rerouteparameters.go b/typedapi/types/rerouteparameters.go index 5f516254e8..bf54deef40 100644 --- a/typedapi/types/rerouteparameters.go +++ b/typedapi/types/rerouteparameters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RerouteParameters type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/reroute/types.ts#L98-L105 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/reroute/types.ts#L98-L105 type RerouteParameters struct { AllowPrimary bool `json:"allow_primary"` FromNode *string `json:"from_node,omitempty"` @@ -57,7 +57,7 @@ func (s *RerouteParameters) UnmarshalJSON(data []byte) error { switch t { case "allow_primary": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *RerouteParameters) UnmarshalJSON(data []byte) error { case "shard": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rerouteprocessor.go b/typedapi/types/rerouteprocessor.go index e1a5c7fae7..7c11f50f55 100644 --- a/typedapi/types/rerouteprocessor.go +++ b/typedapi/types/rerouteprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RerouteProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L971-L999 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L975-L1003 type RerouteProcessor struct { // Dataset Field references or a static value for the dataset part of the data stream // name. @@ -146,7 +146,7 @@ func (s *RerouteProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rescore.go b/typedapi/types/rescore.go index b13eb85e18..d0f840673d 100644 --- a/typedapi/types/rescore.go +++ b/typedapi/types/rescore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,10 +31,11 @@ import ( // Rescore type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/rescoring.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/rescoring.ts#L25-L38 type Rescore struct { - Query RescoreQuery `json:"query"` - WindowSize *int `json:"window_size,omitempty"` + LearningToRank *LearningToRank `json:"learning_to_rank,omitempty"` + Query *RescoreQuery `json:"query,omitempty"` + WindowSize *int `json:"window_size,omitempty"` } func (s *Rescore) UnmarshalJSON(data []byte) error { @@ -52,6 +53,11 @@ func (s *Rescore) UnmarshalJSON(data []byte) error { switch t { + case "learning_to_rank": + if err := dec.Decode(&s.LearningToRank); err != nil { + return fmt.Errorf("%s | %w", "LearningToRank", err) + } + case "query": if err := dec.Decode(&s.Query); err != nil { return fmt.Errorf("%s | %w", "Query", err) @@ -59,7 +65,7 @@ func (s *Rescore) UnmarshalJSON(data []byte) error { case "window_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rescorequery.go b/typedapi/types/rescorequery.go index d89f146c81..ea6a689ab8 100644 --- a/typedapi/types/rescorequery.go +++ b/typedapi/types/rescorequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // RescoreQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/rescoring.ts#L28-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/rescoring.ts#L40-L62 type RescoreQuery struct { // Query The query to use for rescoring. // This query is only run on the Top-K results returned by the `query` and @@ -68,7 +68,7 @@ func (s *RescoreQuery) UnmarshalJSON(data []byte) error { } case "query_weight": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,7 +84,7 @@ func (s *RescoreQuery) UnmarshalJSON(data []byte) error { } case "rescore_query_weight": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/reservedsize.go b/typedapi/types/reservedsize.go index b713d3844d..e373329bc1 100644 --- a/typedapi/types/reservedsize.go +++ b/typedapi/types/reservedsize.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ReservedSize type. 
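`Rescore` becomes a variant container in this update: `query` is now optional (a pointer) and a `learning_to_rank` rescorer is added alongside it. Callers that previously assigned a `RescoreQuery` by value now take its address. A minimal, hypothetical sketch of that adjustment, with field values elided and the import path assumed:

```go
package main

import (
	"fmt"

	// Import path assumed to be the standard v8 typed-API location.
	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Query is now *RescoreQuery, so take the address of an existing value;
	// alternatively, LearningToRank can be set in its place.
	rq := types.RescoreQuery{ /* rescore query, query_weight, ... elided */ }
	windowSize := 50
	r := types.Rescore{
		Query:      &rq,
		WindowSize: &windowSize,
	}
	fmt.Println(*r.WindowSize)
}
```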
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/types.ts#L71-L76 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/types.ts#L71-L76 type ReservedSize struct { NodeId string `json:"node_id"` Path string `json:"path"` @@ -77,7 +77,7 @@ func (s *ReservedSize) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/resolveclusterinfo.go b/typedapi/types/resolveclusterinfo.go index 6ef249ee48..229fca5670 100644 --- a/typedapi/types/resolveclusterinfo.go +++ b/typedapi/types/resolveclusterinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ResolveClusterInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/resolve_cluster/ResolveClusterResponse.ts#L29-L55 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/resolve_cluster/ResolveClusterResponse.ts#L29-L55 type ResolveClusterInfo struct { // Connected Whether the remote cluster is connected to the local (querying) cluster. Connected bool `json:"connected"` @@ -66,7 +66,7 @@ func (s *ResolveClusterInfo) UnmarshalJSON(data []byte) error { switch t { case "connected": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *ResolveClusterInfo) UnmarshalJSON(data []byte) error { s.Error = &o case "matching_indices": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,7 +106,7 @@ func (s *ResolveClusterInfo) UnmarshalJSON(data []byte) error { } case "skip_unavailable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/resolveindexaliasitem.go b/typedapi/types/resolveindexaliasitem.go index a3aa31a9db..021f3dd0c2 100644 --- a/typedapi/types/resolveindexaliasitem.go +++ b/typedapi/types/resolveindexaliasitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ResolveIndexAliasItem type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/resolve_index/ResolveIndexResponse.ts#L37-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/resolve_index/ResolveIndexResponse.ts#L37-L40 type ResolveIndexAliasItem struct { Indices []string `json:"indices"` Name string `json:"name"` diff --git a/typedapi/types/resolveindexdatastreamsitem.go b/typedapi/types/resolveindexdatastreamsitem.go index e5ada75c85..945c6c1b9e 100644 --- a/typedapi/types/resolveindexdatastreamsitem.go +++ b/typedapi/types/resolveindexdatastreamsitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ResolveIndexDataStreamsItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/resolve_index/ResolveIndexResponse.ts#L42-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/resolve_index/ResolveIndexResponse.ts#L42-L46 type ResolveIndexDataStreamsItem struct { BackingIndices []string `json:"backing_indices"` Name string `json:"name"` diff --git a/typedapi/types/resolveindexitem.go b/typedapi/types/resolveindexitem.go index 2f756467c4..3e3aeb8ced 100644 --- a/typedapi/types/resolveindexitem.go +++ b/typedapi/types/resolveindexitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ResolveIndexItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/resolve_index/ResolveIndexResponse.ts#L30-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/resolve_index/ResolveIndexResponse.ts#L30-L35 type ResolveIndexItem struct { Aliases []string `json:"aliases,omitempty"` Attributes []string `json:"attributes"` diff --git a/typedapi/types/resourceprivileges.go b/typedapi/types/resourceprivileges.go index f0f6bef142..3cf76ba1e0 100644 --- a/typedapi/types/resourceprivileges.go +++ b/typedapi/types/resourceprivileges.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ResourcePrivileges type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/has_privileges/types.ts#L47-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/has_privileges/types.ts#L47-L47 type ResourcePrivileges map[string]Privileges diff --git a/typedapi/types/responsebody.go b/typedapi/types/responsebody.go index 9868c602b0..79c77e440a 100644 --- a/typedapi/types/responsebody.go +++ b/typedapi/types/responsebody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // ResponseBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/SearchResponse.ts#L38-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/SearchResponse.ts#L38-L54 type ResponseBody struct { Aggregations map[string]Aggregate `json:"aggregations,omitempty"` Clusters_ *ClusterStatistics `json:"_clusters,omitempty"` @@ -521,7 +521,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -571,7 +571,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -581,7 +581,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -609,7 +609,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { } case "max_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -625,7 +625,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { } case "num_reduce_phases": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -703,7 +703,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { s.Suggest[elems[1]] = append(s.Suggest[elems[1]], o) default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -713,7 +713,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Suggest") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Suggest", err) } @@ -723,7 +723,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { } case "terminated_early": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -737,7 +737,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { } case "timed_out": - var tmp 
interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -751,7 +751,7 @@ func (s *ResponseBody) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/responseitem.go b/typedapi/types/responseitem.go index 407822b1e8..c9dfea2d5c 100644 --- a/typedapi/types/responseitem.go +++ b/typedapi/types/responseitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ResponseItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/bulk/types.ts#L37-L81 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/bulk/types.ts#L37-L81 type ResponseItem struct { // Error Contains additional information about the failed operation. // The parameter is only returned for failed operations. @@ -39,7 +39,7 @@ type ResponseItem struct { ForcedRefresh *bool `json:"forced_refresh,omitempty"` Get *InlineGetDictUserDefined `json:"get,omitempty"` // Id_ The document ID associated with the operation. - Id_ string `json:"_id,omitempty"` + Id_ *string `json:"_id,omitempty"` // Index_ Name of the index associated with the operation. // If the operation targeted a data stream, this is the backing index into which // the document was written. @@ -83,7 +83,7 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { } case "forced_refresh": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,7 +111,7 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Id_ = o + s.Id_ = &o case "_index": var tmp json.RawMessage @@ -126,7 +126,7 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { s.Index_ = o case "_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -164,7 +164,7 @@ func (s *ResponseItem) UnmarshalJSON(data []byte) error { case "status": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/retention.go b/typedapi/types/retention.go index e4ca65b60d..a839f4e715 100644 --- a/typedapi/types/retention.go +++ b/typedapi/types/retention.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Retention type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/_types/SnapshotLifecycle.ts#L84-L97 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/_types/SnapshotLifecycle.ts#L84-L97 type Retention struct { // ExpireAfter Time period after which a snapshot is considered expired and eligible for // deletion. 
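In the `ResponseItem` hunk above, `Id_` changes from `string` to `*string`, so the `_id` of a bulk item is now optional and may be nil. A minimal sketch of nil-safe access, assuming the typed-API types package lives at `github.com/elastic/go-elasticsearch/v8/typedapi/types`:

package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// itemID dereferences the now-optional _id of a bulk response item.
func itemID(item types.ResponseItem) string {
	if item.Id_ != nil {
		return *item.Id_
	}
	return "" // _id was not returned for this item
}

func main() {
	id := "my-doc"
	fmt.Println(itemID(types.ResponseItem{Id_: &id})) // my-doc
	fmt.Println(itemID(types.ResponseItem{}))         // empty string
}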
SLM deletes expired snapshots based on the slm.retention_schedule. @@ -66,7 +66,7 @@ func (s *Retention) UnmarshalJSON(data []byte) error { case "max_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *Retention) UnmarshalJSON(data []byte) error { case "min_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/retentionlease.go b/typedapi/types/retentionlease.go index f4e42baab9..07f09284d5 100644 --- a/typedapi/types/retentionlease.go +++ b/typedapi/types/retentionlease.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RetentionLease type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L65-L67 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L65-L67 type RetentionLease struct { Period Duration `json:"period"` } diff --git a/typedapi/types/retentionpolicy.go b/typedapi/types/retentionpolicy.go index 4dfa93fc8a..24063c5a93 100644 --- a/typedapi/types/retentionpolicy.go +++ b/typedapi/types/retentionpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RetentionPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/_types/Transform.ts#L88-L96 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/_types/Transform.ts#L88-L96 type RetentionPolicy struct { // Field The date field that is used to calculate the age of the document. Field string `json:"field"` diff --git a/typedapi/types/retentionpolicycontainer.go b/typedapi/types/retentionpolicycontainer.go index 9808384351..a3f24d9e7b 100644 --- a/typedapi/types/retentionpolicycontainer.go +++ b/typedapi/types/retentionpolicycontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // RetentionPolicyContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/_types/Transform.ts#L80-L86 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/_types/Transform.ts#L80-L86 type RetentionPolicyContainer struct { // Time Specifies that the transform uses a time field to set the retention policy. 
Time *RetentionPolicy `json:"time,omitempty"` diff --git a/typedapi/types/retries.go b/typedapi/types/retries.go index ad93cd24db..981056b7ce 100644 --- a/typedapi/types/retries.go +++ b/typedapi/types/retries.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Retries type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Retries.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Retries.ts#L22-L25 type Retries struct { Bulk int64 `json:"bulk"` Search int64 `json:"search"` @@ -53,7 +53,7 @@ func (s *Retries) UnmarshalJSON(data []byte) error { switch t { case "bulk": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *Retries) UnmarshalJSON(data []byte) error { } case "search": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/retrievercontainer.go b/typedapi/types/retrievercontainer.go new file mode 100644 index 0000000000..250f358b7f --- /dev/null +++ b/typedapi/types/retrievercontainer.go @@ -0,0 +1,40 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +// RetrieverContainer type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Retriever.ts#L26-L36 +type RetrieverContainer struct { + // Knn A retriever that replaces the functionality of a knn search. + Knn *KnnRetriever `json:"knn,omitempty"` + // Rrf A retriever that produces top documents from reciprocal rank fusion (RRF). + Rrf *RRFRetriever `json:"rrf,omitempty"` + // Standard A retriever that replaces the functionality of a traditional query. + Standard *StandardRetriever `json:"standard,omitempty"` +} + +// NewRetrieverContainer returns a RetrieverContainer. +func NewRetrieverContainer() *RetrieverContainer { + r := &RetrieverContainer{} + + return r +} diff --git a/typedapi/types/reversenestedaggregate.go b/typedapi/types/reversenestedaggregate.go index 92e954d8b7..83970e8120 100644 --- a/typedapi/types/reversenestedaggregate.go +++ b/typedapi/types/reversenestedaggregate.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // ReverseNestedAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L489-L490 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L493-L494 type ReverseNestedAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *ReverseNestedAggregate) UnmarshalJSON(data []byte) error { func (s ReverseNestedAggregate) MarshalJSON() ([]byte, error) { type opt ReverseNestedAggregate // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/reversenestedaggregation.go b/typedapi/types/reversenestedaggregation.go index 35f692abfa..7d3ba12de7 100644 --- a/typedapi/types/reversenestedaggregation.go +++ b/typedapi/types/reversenestedaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,15 +26,12 @@ import ( "errors" "fmt" "io" - "strconv" ) // ReverseNestedAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L719-L725 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L721-L727 type ReverseNestedAggregation struct { - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // Path Defines the nested object field that should be joined back to. // The default is empty, which means that it joins back to the root/main // document level. 
@@ -56,23 +53,6 @@ func (s *ReverseNestedAggregation) UnmarshalJSON(data []byte) error { switch t { - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "path": if err := dec.Decode(&s.Path); err != nil { return fmt.Errorf("%s | %w", "Path", err) diff --git a/typedapi/types/reversetokenfilter.go b/typedapi/types/reversetokenfilter.go index be420af8ce..e59882cde6 100644 --- a/typedapi/types/reversetokenfilter.go +++ b/typedapi/types/reversetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ReverseTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L305-L307 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L307-L309 type ReverseTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` diff --git a/typedapi/types/role.go b/typedapi/types/role.go index 783d8d68ce..df92301f0f 100644 --- a/typedapi/types/role.go +++ b/typedapi/types/role.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Role type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_role/types.ts#L29-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_role/types.ts#L29-L42 type Role struct { Applications []ApplicationPrivileges `json:"applications"` Cluster []string `json:"cluster"` diff --git a/typedapi/types/roledescriptor.go b/typedapi/types/roledescriptor.go index d8e54de388..8629b35216 100644 --- a/typedapi/types/roledescriptor.go +++ b/typedapi/types/roledescriptor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RoleDescriptor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/RoleDescriptor.ts#L28-L56 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/RoleDescriptor.ts#L28-L56 type RoleDescriptor struct { // Applications A list of application privilege entries Applications []ApplicationPrivileges `json:"applications,omitempty"` diff --git a/typedapi/types/roledescriptorread.go b/typedapi/types/roledescriptorread.go index bd6ad80f29..f9b6b5a347 100644 --- a/typedapi/types/roledescriptorread.go +++ b/typedapi/types/roledescriptorread.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RoleDescriptorRead type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/RoleDescriptor.ts#L58-L86 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/RoleDescriptor.ts#L58-L86 type RoleDescriptorRead struct { // Applications A list of application privilege entries Applications []ApplicationPrivileges `json:"applications,omitempty"` diff --git a/typedapi/types/roledescriptorwrapper.go b/typedapi/types/roledescriptorwrapper.go index 5f121757af..4fe0c47173 100644 --- a/typedapi/types/roledescriptorwrapper.go +++ b/typedapi/types/roledescriptorwrapper.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // RoleDescriptorWrapper type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_service_accounts/types.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_service_accounts/types.ts#L22-L24 type RoleDescriptorWrapper struct { RoleDescriptor RoleDescriptorRead `json:"role_descriptor"` } diff --git a/typedapi/types/rolemappingrule.go b/typedapi/types/rolemappingrule.go index 30cab48c13..7f5fd98161 100644 --- a/typedapi/types/rolemappingrule.go +++ b/typedapi/types/rolemappingrule.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // RoleMappingRule type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/RoleMappingRule.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/RoleMappingRule.ts#L23-L34 type RoleMappingRule struct { All []RoleMappingRule `json:"all,omitempty"` Any []RoleMappingRule `json:"any,omitempty"` diff --git a/typedapi/types/roletemplate.go b/typedapi/types/roletemplate.go index 647ef6edf6..3c85a0be17 100644 --- a/typedapi/types/roletemplate.go +++ b/typedapi/types/roletemplate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // RoleTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/RoleTemplate.ts#L28-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/RoleTemplate.ts#L28-L31 type RoleTemplate struct { Format *templateformat.TemplateFormat `json:"format,omitempty"` Template Script `json:"template"` diff --git a/typedapi/types/roletemplateinlinequery.go b/typedapi/types/roletemplateinlinequery.go index 1e7122f7ec..3fa5ad433e 100644 --- a/typedapi/types/roletemplateinlinequery.go +++ b/typedapi/types/roletemplateinlinequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // Query // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L160-L161 -type RoleTemplateInlineQuery interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L279-L280 +type RoleTemplateInlineQuery any diff --git a/typedapi/types/roletemplateinlinescript.go b/typedapi/types/roletemplateinlinescript.go index 7f555ee51a..e4a71d31a2 100644 --- a/typedapi/types/roletemplateinlinescript.go +++ b/typedapi/types/roletemplateinlinescript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // RoleTemplateInlineScript type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L153-L158 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L272-L277 type RoleTemplateInlineScript struct { Lang *scriptlanguage.ScriptLanguage `json:"lang,omitempty"` Options map[string]string `json:"options,omitempty"` diff --git a/typedapi/types/roletemplatequery.go b/typedapi/types/roletemplatequery.go index e74b5d7635..553692fffb 100644 --- a/typedapi/types/roletemplatequery.go +++ b/typedapi/types/roletemplatequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RoleTemplateQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L141-L151 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L260-L270 type RoleTemplateQuery struct { // Template When you create a role, you can specify a query that defines the document // level security permissions. You can optionally diff --git a/typedapi/types/roletemplatescript.go b/typedapi/types/roletemplatescript.go index 23d039f13a..8421df7829 100644 --- a/typedapi/types/roletemplatescript.go +++ b/typedapi/types/roletemplatescript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // RoleTemplateInlineScript // StoredScriptId // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L163-L164 -type RoleTemplateScript interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L282-L283 +type RoleTemplateScript any diff --git a/typedapi/types/rolloverconditions.go b/typedapi/types/rolloverconditions.go index df64918774..c8ce976fcd 100644 --- a/typedapi/types/rolloverconditions.go +++ b/typedapi/types/rolloverconditions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RolloverConditions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/rollover/types.ts#L24-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/rollover/types.ts#L24-L40 type RolloverConditions struct { MaxAge Duration `json:"max_age,omitempty"` MaxAgeMillis *int64 `json:"max_age_millis,omitempty"` @@ -76,7 +76,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { } case "max_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { } case "max_primary_shard_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,7 +111,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { } case "max_primary_shard_size_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -131,7 +131,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { } case "max_size_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -151,7 +151,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { } case "min_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -166,7 +166,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { } case "min_primary_shard_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -186,7 +186,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { } case "min_primary_shard_size_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -206,7 +206,7 @@ func (s *RolloverConditions) UnmarshalJSON(data []byte) error { } case "min_size_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rollupcapabilities.go b/typedapi/types/rollupcapabilities.go index 2c5a4df48b..8df749f983 100644 --- a/typedapi/types/rollupcapabilities.go +++ b/typedapi/types/rollupcapabilities.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // RollupCapabilities type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_rollup_caps/types.ts#L25-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_rollup_caps/types.ts#L25-L27 type RollupCapabilities struct { RollupJobs []RollupCapabilitySummary `json:"rollup_jobs"` } diff --git a/typedapi/types/rollupcapabilitysummary.go b/typedapi/types/rollupcapabilitysummary.go index 0108db2e0f..0f90867005 100644 --- a/typedapi/types/rollupcapabilitysummary.go +++ b/typedapi/types/rollupcapabilitysummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RollupCapabilitySummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_rollup_caps/types.ts#L29-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_rollup_caps/types.ts#L29-L34 type RollupCapabilitySummary struct { Fields map[string][]RollupFieldSummary `json:"fields"` IndexPattern string `json:"index_pattern"` diff --git a/typedapi/types/rollupfieldsummary.go b/typedapi/types/rollupfieldsummary.go index 8404bf1798..f429469a7c 100644 --- a/typedapi/types/rollupfieldsummary.go +++ b/typedapi/types/rollupfieldsummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RollupFieldSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_rollup_caps/types.ts#L36-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_rollup_caps/types.ts#L36-L40 type RollupFieldSummary struct { Agg string `json:"agg"` CalendarInterval Duration `json:"calendar_interval,omitempty"` diff --git a/typedapi/types/rollupjob.go b/typedapi/types/rollupjob.go index 6ac72cfed3..8547c025b9 100644 --- a/typedapi/types/rollupjob.go +++ b/typedapi/types/rollupjob.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // RollupJob type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_jobs/types.ts#L28-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_jobs/types.ts#L28-L32 type RollupJob struct { Config RollupJobConfiguration `json:"config"` Stats RollupJobStats `json:"stats"` diff --git a/typedapi/types/rollupjobconfiguration.go b/typedapi/types/rollupjobconfiguration.go index 13fdc2a6a4..656ecb4135 100644 --- a/typedapi/types/rollupjobconfiguration.go +++ b/typedapi/types/rollupjobconfiguration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RollupJobConfiguration type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_jobs/types.ts#L34-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_jobs/types.ts#L34-L43 type RollupJobConfiguration struct { Cron string `json:"cron"` Groups Groupings `json:"groups"` @@ -98,7 +98,7 @@ func (s *RollupJobConfiguration) UnmarshalJSON(data []byte) error { } case "page_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rollupjobstats.go b/typedapi/types/rollupjobstats.go index 05671ab9a3..ebdc246688 100644 --- a/typedapi/types/rollupjobstats.go +++ b/typedapi/types/rollupjobstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RollupJobStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_jobs/types.ts#L45-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_jobs/types.ts#L45-L58 type RollupJobStats struct { DocumentsProcessed int64 `json:"documents_processed"` IndexFailures int64 `json:"index_failures"` @@ -63,7 +63,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { switch t { case "documents_processed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -78,7 +78,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { } case "index_failures": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -98,7 +98,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { } case "index_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { } case "pages_processed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -133,7 +133,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { } case "processing_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -148,7 +148,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { } case "rollups_indexed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -163,7 +163,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { } case "search_failures": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -183,7 +183,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { } case "search_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -198,7 +198,7 @@ func (s *RollupJobStats) UnmarshalJSON(data []byte) error { } case "trigger_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rollupjobstatus.go b/typedapi/types/rollupjobstatus.go index aa5929176f..86e41b23ee 100644 --- a/typedapi/types/rollupjobstatus.go +++ 
b/typedapi/types/rollupjobstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // RollupJobStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_jobs/types.ts#L60-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_jobs/types.ts#L60-L64 type RollupJobStatus struct { CurrentPosition map[string]json.RawMessage `json:"current_position,omitempty"` JobState indexingjobstate.IndexingJobState `json:"job_state"` @@ -69,7 +69,7 @@ func (s *RollupJobStatus) UnmarshalJSON(data []byte) error { } case "upgraded_doc_id": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rollupjobsummary.go b/typedapi/types/rollupjobsummary.go index 635c623e49..d2f0c45d5d 100644 --- a/typedapi/types/rollupjobsummary.go +++ b/typedapi/types/rollupjobsummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RollupJobSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_rollup_index_caps/types.ts#L28-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_rollup_index_caps/types.ts#L28-L33 type RollupJobSummary struct { Fields map[string][]RollupJobSummaryField `json:"fields"` IndexPattern string `json:"index_pattern"` diff --git a/typedapi/types/rollupjobsummaryfield.go b/typedapi/types/rollupjobsummaryfield.go index 520d45ec6a..43b8ce9185 100644 --- a/typedapi/types/rollupjobsummaryfield.go +++ b/typedapi/types/rollupjobsummaryfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RollupJobSummaryField type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/get_rollup_index_caps/types.ts#L35-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/get_rollup_index_caps/types.ts#L35-L39 type RollupJobSummaryField struct { Agg string `json:"agg"` CalendarInterval Duration `json:"calendar_interval,omitempty"` diff --git a/typedapi/types/routingfield.go b/typedapi/types/routingfield.go index 6868a2e178..10a84ca5a0 100644 --- a/typedapi/types/routingfield.go +++ b/typedapi/types/routingfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RoutingField type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/meta-fields.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/meta-fields.ts#L50-L52 type RoutingField struct { Required bool `json:"required"` } @@ -52,7 +52,7 @@ func (s *RoutingField) UnmarshalJSON(data []byte) error { switch t { case "required": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rrfrank.go b/typedapi/types/rrfrank.go index 272006cd19..29eee0dd9f 100644 --- a/typedapi/types/rrfrank.go +++ b/typedapi/types/rrfrank.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RrfRank type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Rank.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Rank.ts#L32-L37 type RrfRank struct { // RankConstant How much influence documents in individual result sets per query have over // the final ranked result set @@ -56,7 +56,7 @@ func (s *RrfRank) UnmarshalJSON(data []byte) error { switch t { case "rank_constant": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *RrfRank) UnmarshalJSON(data []byte) error { } case "window_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rrfretriever.go b/typedapi/types/rrfretriever.go new file mode 100644 index 0000000000..b2307a4b39 --- /dev/null +++ b/typedapi/types/rrfretriever.go @@ -0,0 +1,126 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// RRFRetriever type. 
+// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Retriever.ts#L73-L80 +type RRFRetriever struct { + // Filter Query to filter the documents that can match. + Filter []Query `json:"filter,omitempty"` + // RankConstant This value determines how much influence documents in individual result sets + // per query have over the final ranked result set. + RankConstant *int `json:"rank_constant,omitempty"` + // RankWindowSize This value determines the size of the individual result sets per query. + RankWindowSize *int `json:"rank_window_size,omitempty"` + // Retrievers A list of child retrievers to specify which sets of returned top documents + // will have the RRF formula applied to them. + Retrievers []RetrieverContainer `json:"retrievers"` +} + +func (s *RRFRetriever) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "filter": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewQuery() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Filter", err) + } + + s.Filter = append(s.Filter, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Filter); err != nil { + return fmt.Errorf("%s | %w", "Filter", err) + } + } + + case "rank_constant": + + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "RankConstant", err) + } + s.RankConstant = &value + case float64: + f := int(v) + s.RankConstant = &f + } + + case "rank_window_size": + + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "RankWindowSize", err) + } + s.RankWindowSize = &value + case float64: + f := int(v) + s.RankWindowSize = &f + } + + case "retrievers": + if err := dec.Decode(&s.Retrievers); err != nil { + return fmt.Errorf("%s | %w", "Retrievers", err) + } + + } + } + return nil +} + +// NewRRFRetriever returns a RRFRetriever. +func NewRRFRetriever() *RRFRetriever { + r := &RRFRetriever{} + + return r +} diff --git a/typedapi/types/rulecondition.go b/typedapi/types/rulecondition.go index bf51ece7fe..0918505498 100644 --- a/typedapi/types/rulecondition.go +++ b/typedapi/types/rulecondition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // RuleCondition type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Rule.ts#L52-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Rule.ts#L52-L65 type RuleCondition struct { // AppliesTo Specifies the result property to which the condition applies. 
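The new `RetrieverContainer` and `RRFRetriever` types added above model the `retriever` clause from `specification/_types/Retriever.ts`. A minimal sketch of building an RRF retriever, assuming the types package at `github.com/elastic/go-elasticsearch/v8/typedapi/types`; the child `StandardRetriever`/`KnnRetriever` bodies are not part of this diff, so they are left empty here:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	rankConstant, windowSize := 60, 100

	retriever := types.RetrieverContainer{
		Rrf: &types.RRFRetriever{
			RankConstant:   &rankConstant,
			RankWindowSize: &windowSize,
			// Each child container would normally carry a fully populated
			// standard (query) or knn retriever; their fields are elided here.
			Retrievers: []types.RetrieverContainer{
				{Standard: &types.StandardRetriever{}},
				{Knn: &types.KnnRetriever{}},
			},
		},
	}

	body, err := json.Marshal(retriever)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}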
If your // detector uses `lat_long`, `metric`, `rare`, or `freq_rare` functions, you can @@ -73,7 +73,7 @@ func (s *RuleCondition) UnmarshalJSON(data []byte) error { } case "value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/rulequery.go b/typedapi/types/rulequery.go index b3b4ed81f3..651bea4090 100644 --- a/typedapi/types/rulequery.go +++ b/typedapi/types/rulequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RuleQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L369-L373 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L381-L385 type RuleQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -42,7 +42,7 @@ type RuleQuery struct { MatchCriteria json.RawMessage `json:"match_criteria,omitempty"` Organic *Query `json:"organic,omitempty"` QueryName_ *string `json:"_name,omitempty"` - RulesetId string `json:"ruleset_id"` + RulesetIds []string `json:"ruleset_ids"` } func (s *RuleQuery) UnmarshalJSON(data []byte) error { @@ -61,7 +61,7 @@ func (s *RuleQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -98,9 +98,9 @@ func (s *RuleQuery) UnmarshalJSON(data []byte) error { } s.QueryName_ = &o - case "ruleset_id": - if err := dec.Decode(&s.RulesetId); err != nil { - return fmt.Errorf("%s | %w", "RulesetId", err) + case "ruleset_ids": + if err := dec.Decode(&s.RulesetIds); err != nil { + return fmt.Errorf("%s | %w", "RulesetIds", err) } } diff --git a/typedapi/types/runningstatesearchinterval.go b/typedapi/types/runningstatesearchinterval.go index 95b6bd1d3f..300e01ca06 100644 --- a/typedapi/types/runningstatesearchinterval.go +++ b/typedapi/types/runningstatesearchinterval.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // RunningStateSearchInterval type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Datafeed.ts#L214-L231 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Datafeed.ts#L213-L230 type RunningStateSearchInterval struct { // End The end time. End Duration `json:"end,omitempty"` diff --git a/typedapi/types/runtimefield.go b/typedapi/types/runtimefield.go index 6ce791e4b3..b389361b2f 100644 --- a/typedapi/types/runtimefield.go +++ b/typedapi/types/runtimefield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
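The `RuleQuery` hunk above replaces the single `ruleset_id` with `ruleset_ids`, so a rule query now targets a list of query rulesets. A minimal sketch, assuming the types package at `github.com/elastic/go-elasticsearch/v8/typedapi/types`; the ruleset names and match criteria are illustrative only:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	rq := types.RuleQuery{
		// Several rulesets can now be applied by one rule query.
		RulesetIds:    []string{"promo-rules", "seasonal-rules"},
		MatchCriteria: json.RawMessage(`{"user_query":"pugs"}`),
		Organic:       types.NewQuery(), // the organic query body is elided in this sketch
	}

	body, err := json.Marshal(rq)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(body))
}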
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // RuntimeField type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/RuntimeFields.ts#L26-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/RuntimeFields.ts#L26-L48 type RuntimeField struct { // FetchFields For type `lookup` FetchFields []RuntimeFieldFetchFields `json:"fetch_fields,omitempty"` diff --git a/typedapi/types/runtimefieldfetchfields.go b/typedapi/types/runtimefieldfetchfields.go index 7106f77452..604b9890e0 100644 --- a/typedapi/types/runtimefieldfetchfields.go +++ b/typedapi/types/runtimefieldfetchfields.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RuntimeFieldFetchFields type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/RuntimeFields.ts#L50-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/RuntimeFields.ts#L50-L54 type RuntimeFieldFetchFields struct { Field string `json:"field"` Format *string `json:"format,omitempty"` diff --git a/typedapi/types/runtimefields.go b/typedapi/types/runtimefields.go index 7df22662db..e14dcacc98 100644 --- a/typedapi/types/runtimefields.go +++ b/typedapi/types/runtimefields.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // RuntimeFields type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/RuntimeFields.ts#L24-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/RuntimeFields.ts#L24-L24 type RuntimeFields map[string]RuntimeField diff --git a/typedapi/types/runtimefieldstype.go b/typedapi/types/runtimefieldstype.go index 5083b95666..8e4c6de0f6 100644 --- a/typedapi/types/runtimefieldstype.go +++ b/typedapi/types/runtimefieldstype.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // RuntimeFieldsType type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L279-L294 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L279-L294 type RuntimeFieldsType struct { CharsMax int64 `json:"chars_max"` CharsTotal int64 `json:"chars_total"` @@ -65,7 +65,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { switch t { case "chars_max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { } case "chars_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { } case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +110,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { } case "doc_max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { } case "doc_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -140,7 +140,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { } case "index_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -160,7 +160,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { } case "lines_max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -175,7 +175,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { } case "lines_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -195,7 +195,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { } case "scriptless_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -210,7 +210,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { } case "shadowed_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -225,7 +225,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { } case "source_max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -240,7 +240,7 @@ func (s *RuntimeFieldsType) UnmarshalJSON(data []byte) error { } case "source_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/s3repository.go b/typedapi/types/s3repository.go index 972cedb04a..8880ce39c3 100644 --- a/typedapi/types/s3repository.go +++ b/typedapi/types/s3repository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // S3Repository type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L50-L53 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L50-L53 type S3Repository struct { Settings S3RepositorySettings `json:"settings"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/s3repositorysettings.go b/typedapi/types/s3repositorysettings.go index fc1182bf63..9ac94a7fd0 100644 --- a/typedapi/types/s3repositorysettings.go +++ b/typedapi/types/s3repositorysettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // S3RepositorySettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L93-L102 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L93-L102 type S3RepositorySettings struct { BasePath *string `json:"base_path,omitempty"` Bucket string `json:"bucket"` @@ -121,7 +121,7 @@ func (s *S3RepositorySettings) UnmarshalJSON(data []byte) error { s.Client = &o case "compress": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -145,7 +145,7 @@ func (s *S3RepositorySettings) UnmarshalJSON(data []byte) error { } case "readonly": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -159,7 +159,7 @@ func (s *S3RepositorySettings) UnmarshalJSON(data []byte) error { } case "server_side_encryption": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/samplediversity.go b/typedapi/types/samplediversity.go index 5253824f73..1a02359fd9 100644 --- a/typedapi/types/samplediversity.go +++ b/typedapi/types/samplediversity.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SampleDiversity type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/graph/_types/ExploreControls.ts#L51-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/graph/_types/ExploreControls.ts#L51-L54 type SampleDiversity struct { Field string `json:"field"` MaxDocsPerValue int `json:"max_docs_per_value"` @@ -59,7 +59,7 @@ func (s *SampleDiversity) UnmarshalJSON(data []byte) error { case "max_docs_per_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/sampleraggregate.go b/typedapi/types/sampleraggregate.go index 138f625a57..ec611912b9 100644 --- a/typedapi/types/sampleraggregate.go +++ b/typedapi/types/sampleraggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // SamplerAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L498-L499 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L502-L503 type SamplerAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *SamplerAggregate) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *SamplerAggregate) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *SamplerAggregate) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *SamplerAggregate) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *SamplerAggregate) UnmarshalJSON(data []byte) error { func (s SamplerAggregate) MarshalJSON() ([]byte, error) { type opt SamplerAggregate // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/sampleraggregation.go b/typedapi/types/sampleraggregation.go index 4e4d0c1bf5..337e7fd6e9 100644 --- a/typedapi/types/sampleraggregation.go +++ b/typedapi/types/sampleraggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,10 +31,8 @@ import ( // SamplerAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L727-L733 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L729-L735 type SamplerAggregation struct { - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` // ShardSize Limits how many top-scoring documents are collected in the sample processed // on each shard. ShardSize *int `json:"shard_size,omitempty"` @@ -55,26 +53,9 @@ func (s *SamplerAggregation) UnmarshalJSON(data []byte) error { switch t { - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/scalarvalue.go b/typedapi/types/scalarvalue.go index d95ef8da82..1153d87569 100644 --- a/typedapi/types/scalarvalue.go +++ b/typedapi/types/scalarvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -28,5 +28,5 @@ package types // bool // nil // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L39-L43 -type ScalarValue interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L39-L43 +type ScalarValue any diff --git a/typedapi/types/scaledfloatnumberproperty.go b/typedapi/types/scaledfloatnumberproperty.go index 1f5a4e6286..a495c5e474 100644 --- a/typedapi/types/scaledfloatnumberproperty.go +++ b/typedapi/types/scaledfloatnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // ScaledFloatNumberProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L174-L178 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L182-L186 type ScaledFloatNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -80,7 +80,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -126,7 +126,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -151,7 +151,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -172,7 +172,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -262,12 +262,6 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -286,6 +280,18 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -442,6 +448,12 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -453,7 +465,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -468,7 +480,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -482,7 +494,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -504,7 +516,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { } case 
"null_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -531,7 +543,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -552,7 +564,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -642,12 +654,6 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -666,6 +672,18 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -822,6 +840,12 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -832,7 +856,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "scaling_factor": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -896,7 +920,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -910,7 +934,7 @@ func (s *ScaledFloatNumberProperty) UnmarshalJSON(data []byte) error { } case "time_series_dimension": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/schedulecontainer.go b/typedapi/types/schedulecontainer.go index 7b96f84653..15caa89987 100644 --- a/typedapi/types/schedulecontainer.go +++ b/typedapi/types/schedulecontainer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ScheduleContainer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Schedule.ts#L80-L91 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Schedule.ts#L80-L91 type ScheduleContainer struct { Cron *string `json:"cron,omitempty"` Daily *DailySchedule `json:"daily,omitempty"` diff --git a/typedapi/types/scheduletimeofday.go b/typedapi/types/scheduletimeofday.go index 611c981600..be481e180e 100644 --- a/typedapi/types/scheduletimeofday.go +++ b/typedapi/types/scheduletimeofday.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // HourAndMinute // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Schedule.ts#L98-L103 -type ScheduleTimeOfDay interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Schedule.ts#L98-L103 +type ScheduleTimeOfDay any diff --git a/typedapi/types/scheduletriggerevent.go b/typedapi/types/scheduletriggerevent.go index 30284bd5d7..5814082f29 100644 --- a/typedapi/types/scheduletriggerevent.go +++ b/typedapi/types/scheduletriggerevent.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ScheduleTriggerEvent type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Schedule.ts#L93-L96 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Schedule.ts#L93-L96 type ScheduleTriggerEvent struct { ScheduledTime DateTime `json:"scheduled_time"` TriggeredTime DateTime `json:"triggered_time,omitempty"` diff --git a/typedapi/types/scoresort.go b/typedapi/types/scoresort.go index 4dfcb2de13..11792e2c4e 100644 --- a/typedapi/types/scoresort.go +++ b/typedapi/types/scoresort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // ScoreSort type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/sort.ts#L55-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/sort.ts#L55-L57 type ScoreSort struct { Order *sortorder.SortOrder `json:"order,omitempty"` } diff --git a/typedapi/types/script.go b/typedapi/types/script.go index 1ae2d7c38e..207d1e1dda 100644 --- a/typedapi/types/script.go +++ b/typedapi/types/script.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // InlineScript // StoredScriptId // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Scripting.ts#L88-L89 -type Script interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Scripting.ts#L88-L89 +type Script any diff --git a/typedapi/types/scriptcache.go b/typedapi/types/scriptcache.go index 2f826d9cd0..72799e0b19 100644 --- a/typedapi/types/scriptcache.go +++ b/typedapi/types/scriptcache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ScriptCache type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L1031-L1045 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L1031-L1045 type ScriptCache struct { // CacheEvictions Total number of times the script cache has evicted old data. CacheEvictions *int64 `json:"cache_evictions,omitempty"` @@ -59,7 +59,7 @@ func (s *ScriptCache) UnmarshalJSON(data []byte) error { switch t { case "cache_evictions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -74,7 +74,7 @@ func (s *ScriptCache) UnmarshalJSON(data []byte) error { } case "compilation_limit_triggered": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *ScriptCache) UnmarshalJSON(data []byte) error { } case "compilations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/scriptcondition.go b/typedapi/types/scriptcondition.go index b6734c98bd..aed1f4c40e 100644 --- a/typedapi/types/scriptcondition.go +++ b/typedapi/types/scriptcondition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ScriptCondition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Conditions.ts#L76-L84 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Conditions.ts#L79-L87 type ScriptCondition struct { Id *string `json:"id,omitempty"` Lang *string `json:"lang,omitempty"` diff --git a/typedapi/types/scriptedheuristic.go b/typedapi/types/scriptedheuristic.go index 607f931b62..5b4f53827c 100644 --- a/typedapi/types/scriptedheuristic.go +++ b/typedapi/types/scriptedheuristic.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ScriptedHeuristic type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L766-L768 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L768-L770 type ScriptedHeuristic struct { Script Script `json:"script"` } diff --git a/typedapi/types/scriptedmetricaggregate.go b/typedapi/types/scriptedmetricaggregate.go index 0a3bac814e..f90cdc496d 100644 --- a/typedapi/types/scriptedmetricaggregate.go +++ b/typedapi/types/scriptedmetricaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ScriptedMetricAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L649-L652 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L653-L656 type ScriptedMetricAggregate struct { Meta Metadata `json:"meta,omitempty"` Value json.RawMessage `json:"value,omitempty"` diff --git a/typedapi/types/scriptedmetricaggregation.go b/typedapi/types/scriptedmetricaggregation.go index 70a7edb53e..2c29b42856 100644 --- a/typedapi/types/scriptedmetricaggregation.go +++ b/typedapi/types/scriptedmetricaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ScriptedMetricAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L254-L280 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L254-L280 type ScriptedMetricAggregation struct { // CombineScript Runs once on each shard after document collection is complete. 
// Allows the aggregation to consolidate the state returned from each shard. diff --git a/typedapi/types/scriptfield.go b/typedapi/types/scriptfield.go index b4c6805ed5..c582b999c1 100644 --- a/typedapi/types/scriptfield.go +++ b/typedapi/types/scriptfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ScriptField type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Scripting.ts#L91-L94 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Scripting.ts#L91-L94 type ScriptField struct { IgnoreFailure *bool `json:"ignore_failure,omitempty"` Script Script `json:"script"` @@ -53,7 +53,7 @@ func (s *ScriptField) UnmarshalJSON(data []byte) error { switch t { case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/scripting.go b/typedapi/types/scripting.go index b098cb7f27..3ec1bbffec 100644 --- a/typedapi/types/scripting.go +++ b/typedapi/types/scripting.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Scripting type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L977-L995 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L977-L995 type Scripting struct { // CacheEvictions Total number of times the script cache has evicted old data. CacheEvictions *int64 `json:"cache_evictions,omitempty"` @@ -61,7 +61,7 @@ func (s *Scripting) UnmarshalJSON(data []byte) error { switch t { case "cache_evictions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *Scripting) UnmarshalJSON(data []byte) error { } case "compilation_limit_triggered": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *Scripting) UnmarshalJSON(data []byte) error { } case "compilations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/scriptprocessor.go b/typedapi/types/scriptprocessor.go index 1dca8017f1..41d8980d46 100644 --- a/typedapi/types/scriptprocessor.go +++ b/typedapi/types/scriptprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ScriptProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L1001-L1021 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L1005-L1025 type ScriptProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -102,7 +102,7 @@ func (s *ScriptProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/scriptquery.go b/typedapi/types/scriptquery.go index 3884250aa8..088d327a0e 100644 --- a/typedapi/types/scriptquery.go +++ b/typedapi/types/scriptquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ScriptQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L318-L324 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L327-L333 type ScriptQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -61,7 +61,7 @@ func (s *ScriptQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/scriptscorefunction.go b/typedapi/types/scriptscorefunction.go index b50ae04603..6371ec9ff8 100644 --- a/typedapi/types/scriptscorefunction.go +++ b/typedapi/types/scriptscorefunction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ScriptScoreFunction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/compound.ts#L120-L125 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L121-L126 type ScriptScoreFunction struct { // Script A script that computes a score. Script Script `json:"script"` diff --git a/typedapi/types/scriptscorequery.go b/typedapi/types/scriptscorequery.go index 8bc4ad0af0..1287b533ac 100644 --- a/typedapi/types/scriptscorequery.go +++ b/typedapi/types/scriptscorequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ScriptScoreQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L326-L340 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L335-L349 type ScriptScoreQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -67,7 +67,7 @@ func (s *ScriptScoreQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *ScriptScoreQuery) UnmarshalJSON(data []byte) error { } case "min_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/scriptsort.go b/typedapi/types/scriptsort.go index e06276dacf..9e670d1f11 100644 --- a/typedapi/types/scriptsort.go +++ b/typedapi/types/scriptsort.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // ScriptSort type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/sort.ts#L68-L74 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/sort.ts#L72-L78 type ScriptSort struct { Mode *sortmode.SortMode `json:"mode,omitempty"` Nested *NestedSortValue `json:"nested,omitempty"` diff --git a/typedapi/types/scripttransform.go b/typedapi/types/scripttransform.go index 7855434c5c..9013593d80 100644 --- a/typedapi/types/scripttransform.go +++ b/typedapi/types/scripttransform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ScriptTransform type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Transform.ts#L36-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Transform.ts#L36-L44 type ScriptTransform struct { Id *string `json:"id,omitempty"` Lang *string `json:"lang,omitempty"` diff --git a/typedapi/types/scrollids.go b/typedapi/types/scrollids.go index 5175c1a8b6..8a7889c37d 100644 --- a/typedapi/types/scrollids.go +++ b/typedapi/types/scrollids.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ScrollIds type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L56-L56 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L56-L56 type ScrollIds []string diff --git a/typedapi/types/searchablesnapshots.go b/typedapi/types/searchablesnapshots.go index b82a0cf1c9..a61a01fcaf 100644 --- a/typedapi/types/searchablesnapshots.go +++ b/typedapi/types/searchablesnapshots.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SearchableSnapshots type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L428-L432 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L428-L432 type SearchableSnapshots struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -56,7 +56,7 @@ func (s *SearchableSnapshots) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *SearchableSnapshots) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *SearchableSnapshots) UnmarshalJSON(data []byte) error { case "full_copy_indices_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -101,7 +101,7 @@ func (s *SearchableSnapshots) UnmarshalJSON(data []byte) error { case "indices_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +117,7 @@ func (s *SearchableSnapshots) UnmarshalJSON(data []byte) error { case "shared_cache_indices_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/searchapplication.go b/typedapi/types/searchapplication.go index 664c4cd2ec..1dc6083e3f 100644 --- a/typedapi/types/searchapplication.go +++ b/typedapi/types/searchapplication.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SearchApplication type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/_types/SearchApplication.ts#L24-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/_types/SearchApplication.ts#L24-L45 type SearchApplication struct { // AnalyticsCollectionName Analytics collection associated to the Search Application. 
AnalyticsCollectionName *string `json:"analytics_collection_name,omitempty"` diff --git a/typedapi/types/searchapplicationlistitem.go b/typedapi/types/searchapplicationlistitem.go index 9f2ae0fecd..9f415e659a 100644 --- a/typedapi/types/searchapplicationlistitem.go +++ b/typedapi/types/searchapplicationlistitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SearchApplicationListItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/list/SearchApplicationsListResponse.ts#L31-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/list/SearchApplicationsListResponse.ts#L31-L48 type SearchApplicationListItem struct { // AnalyticsCollectionName Analytics collection associated to the Search Application AnalyticsCollectionName *string `json:"analytics_collection_name,omitempty"` diff --git a/typedapi/types/searchapplicationtemplate.go b/typedapi/types/searchapplicationtemplate.go index 686b7953e0..0bb4a8604d 100644 --- a/typedapi/types/searchapplicationtemplate.go +++ b/typedapi/types/searchapplicationtemplate.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SearchApplicationTemplate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/search_application/_types/SearchApplication.ts#L47-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/search_application/_types/SearchApplication.ts#L47-L52 type SearchApplicationTemplate struct { // Script The associated mustache template. Script InlineScript `json:"script"` diff --git a/typedapi/types/searchasyoutypeproperty.go b/typedapi/types/searchasyoutypeproperty.go index 66a467e4e0..1cea3035ac 100644 --- a/typedapi/types/searchasyoutypeproperty.go +++ b/typedapi/types/searchasyoutypeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // SearchAsYouTypeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L198-L208 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L212-L222 type SearchAsYouTypeProperty struct { Analyzer *string `json:"analyzer,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -112,7 +112,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -133,7 +133,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -223,12 +223,6 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -247,6 +241,18 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -403,6 +409,12 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -414,7 +426,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -429,7 +441,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -449,7 +461,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { case "max_shingle_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -472,7 +484,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { } case "norms": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -492,7 +504,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -513,7 +525,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case 
"{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -603,12 +615,6 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -627,6 +633,18 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -783,6 +801,12 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -829,7 +853,7 @@ func (s *SearchAsYouTypeProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/searchidle.go b/typedapi/types/searchidle.go index 345a6e8eea..513021c501 100644 --- a/typedapi/types/searchidle.go +++ b/typedapi/types/searchidle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SearchIdle type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L243-L246 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L245-L248 type SearchIdle struct { After Duration `json:"after,omitempty"` } diff --git a/typedapi/types/searchinput.go b/typedapi/types/searchinput.go index cf166fe081..1b49e62227 100644 --- a/typedapi/types/searchinput.go +++ b/typedapi/types/searchinput.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SearchInput type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L112-L116 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L112-L116 type SearchInput struct { Extract []string `json:"extract,omitempty"` Request SearchInputRequestDefinition `json:"request"` diff --git a/typedapi/types/searchinputrequestbody.go b/typedapi/types/searchinputrequestbody.go index 89c6cdaefa..4c5adc7165 100644 --- a/typedapi/types/searchinputrequestbody.go +++ b/typedapi/types/searchinputrequestbody.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SearchInputRequestBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L147-L149 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L147-L149 type SearchInputRequestBody struct { Query Query `json:"query"` } diff --git a/typedapi/types/searchinputrequestdefinition.go b/typedapi/types/searchinputrequestdefinition.go index 5fcd28f35c..a255d54d7a 100644 --- a/typedapi/types/searchinputrequestdefinition.go +++ b/typedapi/types/searchinputrequestdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // SearchInputRequestDefinition type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L118-L125 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L118-L125 type SearchInputRequestDefinition struct { Body *SearchInputRequestBody `json:"body,omitempty"` Indices []string `json:"indices,omitempty"` @@ -74,7 +74,7 @@ func (s *SearchInputRequestDefinition) UnmarshalJSON(data []byte) error { } case "rest_total_hits_as_int": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/searchprofile.go b/typedapi/types/searchprofile.go index f43d7aded9..b1b6db162e 100644 --- a/typedapi/types/searchprofile.go +++ b/typedapi/types/searchprofile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SearchProfile type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L126-L130 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L126-L130 type SearchProfile struct { Collector []Collector `json:"collector"` Query []QueryProfile `json:"query"` @@ -64,7 +64,7 @@ func (s *SearchProfile) UnmarshalJSON(data []byte) error { } case "rewrite_time": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/searchstats.go b/typedapi/types/searchstats.go index 09983172b2..263485ed97 100644 --- a/typedapi/types/searchstats.go +++ b/typedapi/types/searchstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SearchStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L252-L271 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L252-L271 type SearchStats struct { FetchCurrent int64 `json:"fetch_current"` FetchTime Duration `json:"fetch_time,omitempty"` @@ -69,7 +69,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { switch t { case "fetch_current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -94,7 +94,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { } case "fetch_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +117,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { } case "open_contexts": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -132,7 +132,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { } case "query_current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -157,7 +157,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { } case "query_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -172,7 +172,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { } case "scroll_current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -197,7 +197,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { } case "scroll_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -212,7 +212,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { } case "suggest_current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -237,7 +237,7 @@ func (s *SearchStats) UnmarshalJSON(data []byte) error { } case "suggest_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/searchtemplaterequestbody.go b/typedapi/types/searchtemplaterequestbody.go index c69b3111c9..0242e9703c 100644 --- a/typedapi/types/searchtemplaterequestbody.go +++ 
b/typedapi/types/searchtemplaterequestbody.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SearchTemplateRequestBody type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L128-L145 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L128-L145 type SearchTemplateRequestBody struct { Explain *bool `json:"explain,omitempty"` // Id ID of the search template to use. If no source is specified, @@ -61,7 +61,7 @@ func (s *SearchTemplateRequestBody) UnmarshalJSON(data []byte) error { switch t { case "explain": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *SearchTemplateRequestBody) UnmarshalJSON(data []byte) error { } case "profile": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/searchtransform.go b/typedapi/types/searchtransform.go index 9b921b8627..a4c719a87b 100644 --- a/typedapi/types/searchtransform.go +++ b/typedapi/types/searchtransform.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SearchTransform type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Transform.ts#L46-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Transform.ts#L46-L49 type SearchTransform struct { Request SearchInputRequestDefinition `json:"request"` Timeout Duration `json:"timeout"` diff --git a/typedapi/types/security.go b/typedapi/types/security.go index cac2134b98..8950ff4b20 100644 --- a/typedapi/types/security.go +++ b/typedapi/types/security.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Security type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L434-L447 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L434-L447 type Security struct { Anonymous FeatureToggle `json:"anonymous"` ApiKeyService FeatureToggle `json:"api_key_service"` @@ -80,7 +80,7 @@ func (s *Security) UnmarshalJSON(data []byte) error { } case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -94,7 +94,7 @@ func (s *Security) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/securityrolemapping.go b/typedapi/types/securityrolemapping.go index 58bf3c7c5f..fc878e36d2 100644 --- a/typedapi/types/securityrolemapping.go +++ b/typedapi/types/securityrolemapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,12 +31,12 @@ import ( // SecurityRoleMapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/RoleMapping.ts#L25-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/RoleMapping.ts#L25-L33 type SecurityRoleMapping struct { Enabled bool `json:"enabled"` Metadata Metadata `json:"metadata"` RoleTemplates []RoleTemplate `json:"role_templates,omitempty"` - Roles []string `json:"roles"` + Roles []string `json:"roles,omitempty"` Rules RoleMappingRule `json:"rules"` } @@ -56,7 +56,7 @@ func (s *SecurityRoleMapping) UnmarshalJSON(data []byte) error { switch t { case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/securityroles.go b/typedapi/types/securityroles.go index 3c624241fb..f6ddd0d6ca 100644 --- a/typedapi/types/securityroles.go +++ b/typedapi/types/securityroles.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SecurityRoles type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L296-L300 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L296-L300 type SecurityRoles struct { Dls SecurityRolesDls `json:"dls"` File SecurityRolesFile `json:"file"` diff --git a/typedapi/types/securityrolesdls.go b/typedapi/types/securityrolesdls.go index 957794ccc3..2ae6e87393 100644 --- a/typedapi/types/securityrolesdls.go +++ b/typedapi/types/securityrolesdls.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SecurityRolesDls type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L308-L310 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L308-L310 type SecurityRolesDls struct { BitSetCache SecurityRolesDlsBitSetCache `json:"bit_set_cache"` } diff --git a/typedapi/types/securityrolesdlsbitsetcache.go b/typedapi/types/securityrolesdlsbitsetcache.go index 2afbf9aaeb..976181f9ad 100644 --- a/typedapi/types/securityrolesdlsbitsetcache.go +++ b/typedapi/types/securityrolesdlsbitsetcache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SecurityRolesDlsBitSetCache type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L312-L316 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L312-L316 type SecurityRolesDlsBitSetCache struct { Count int `json:"count"` Memory ByteSize `json:"memory,omitempty"` @@ -55,7 +55,7 @@ func (s *SecurityRolesDlsBitSetCache) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/securityrolesfile.go b/typedapi/types/securityrolesfile.go index ec620c3ce5..6325d831cb 100644 --- a/typedapi/types/securityrolesfile.go +++ b/typedapi/types/securityrolesfile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SecurityRolesFile type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L318-L322 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L318-L322 type SecurityRolesFile struct { Dls bool `json:"dls"` Fls bool `json:"fls"` @@ -54,7 +54,7 @@ func (s *SecurityRolesFile) UnmarshalJSON(data []byte) error { switch t { case "dls": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *SecurityRolesFile) UnmarshalJSON(data []byte) error { } case "fls": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *SecurityRolesFile) UnmarshalJSON(data []byte) error { } case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/securityrolesnative.go b/typedapi/types/securityrolesnative.go index 7e7d00e9ce..b340972d9d 100644 --- a/typedapi/types/securityrolesnative.go +++ b/typedapi/types/securityrolesnative.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SecurityRolesNative type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L302-L306 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L302-L306 type SecurityRolesNative struct { Dls bool `json:"dls"` Fls bool `json:"fls"` @@ -54,7 +54,7 @@ func (s *SecurityRolesNative) UnmarshalJSON(data []byte) error { switch t { case "dls": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *SecurityRolesNative) UnmarshalJSON(data []byte) error { } case "fls": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *SecurityRolesNative) UnmarshalJSON(data []byte) error { } case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/segment.go b/typedapi/types/segment.go index d8742d66b8..9a65e04c86 100644 --- a/typedapi/types/segment.go +++ b/typedapi/types/segment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Segment type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/segments/types.ts#L28-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/segments/types.ts#L28-L38 type Segment struct { Attributes map[string]string `json:"attributes"` Committed bool `json:"committed"` @@ -68,7 +68,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { } case "committed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { } case "compound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { } case "deleted_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -112,7 +112,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { case "generation": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -127,7 +127,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { } case "num_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -142,7 +142,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { } case "search": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -156,7 +156,7 @@ func (s *Segment) UnmarshalJSON(data []byte) error { } case "size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/segmentsrecord.go b/typedapi/types/segmentsrecord.go index b45b99c0c5..c4f3a82926 100644 --- a/typedapi/types/segmentsrecord.go +++ b/typedapi/types/segmentsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SegmentsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/segments/types.ts#L22-L107 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/segments/types.ts#L22-L107 type SegmentsRecord struct { // Committed If `true`, the segment is synced to disk. // Segments that are synced can survive a hard reboot. diff --git a/typedapi/types/segmentsstats.go b/typedapi/types/segmentsstats.go index 57a4999664..09591437cf 100644 --- a/typedapi/types/segmentsstats.go +++ b/typedapi/types/segmentsstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SegmentsStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L273-L366 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L273-L366 type SegmentsStats struct { // Count Total number of segments across all shards assigned to selected nodes. Count int `json:"count"` @@ -121,7 +121,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -141,7 +141,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { } case "doc_values_memory_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -169,7 +169,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { } case "fixed_bit_set_memory_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -184,7 +184,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { } case "index_writer_max_memory_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -204,7 +204,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { } case "index_writer_memory_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -219,7 +219,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { } case "max_unsafe_auto_id_timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -239,7 +239,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { } case "memory_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -259,7 +259,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { } case "norms_memory_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -279,7 +279,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { } case "points_memory_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -294,7 +294,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { } case "stored_fields_memory_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -314,7 +314,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { } case "term_vectors_memory_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -339,7 +339,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { } case "terms_memory_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -359,7 +359,7 @@ func (s *SegmentsStats) UnmarshalJSON(data []byte) error { } case "version_map_memory_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/semanticquery.go b/typedapi/types/semanticquery.go new file mode 100644 index 0000000000..21e54212c4 --- /dev/null +++ b/typedapi/types/semanticquery.go @@ -0,0 +1,126 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. 
licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// SemanticQuery type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/SemanticQuery.ts#L22-L27 +type SemanticQuery struct { + // Boost Floating point number used to decrease or increase the relevance scores of + // the query. + // Boost values are relative to the default value of 1.0. + // A boost value between 0 and 1.0 decreases the relevance score. + // A value greater than 1.0 increases the relevance score. + Boost *float32 `json:"boost,omitempty"` + // Field The field to query, which must be a semantic_text field type + Field string `json:"field"` + // Query The query text + Query string `json:"query"` + QueryName_ *string `json:"_name,omitempty"` +} + +func (s *SemanticQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "Boost", err) + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "field": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Field", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Field = o + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Query", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Query = o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "QueryName_", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.QueryName_ = &o + + } + } + return nil +} + +// NewSemanticQuery returns a SemanticQuery. +func NewSemanticQuery() *SemanticQuery { + r := &SemanticQuery{} + + return r +} diff --git a/typedapi/types/semantictextproperty.go b/typedapi/types/semantictextproperty.go new file mode 100644 index 0000000000..bf1719e301 --- /dev/null +++ b/typedapi/types/semantictextproperty.go @@ -0,0 +1,99 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. 
licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" +) + +// SemanticTextProperty type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L206-L210 +type SemanticTextProperty struct { + InferenceId string `json:"inference_id"` + Meta map[string]string `json:"meta,omitempty"` + Type string `json:"type,omitempty"` +} + +func (s *SemanticTextProperty) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "inference_id": + if err := dec.Decode(&s.InferenceId); err != nil { + return fmt.Errorf("%s | %w", "InferenceId", err) + } + + case "meta": + if s.Meta == nil { + s.Meta = make(map[string]string, 0) + } + if err := dec.Decode(&s.Meta); err != nil { + return fmt.Errorf("%s | %w", "Meta", err) + } + + case "type": + if err := dec.Decode(&s.Type); err != nil { + return fmt.Errorf("%s | %w", "Type", err) + } + + } + } + return nil +} + +// MarshalJSON override marshalling to include literal value +func (s SemanticTextProperty) MarshalJSON() ([]byte, error) { + type innerSemanticTextProperty SemanticTextProperty + tmp := innerSemanticTextProperty{ + InferenceId: s.InferenceId, + Meta: s.Meta, + Type: s.Type, + } + + tmp.Type = "semantic_text" + + return json.Marshal(tmp) +} + +// NewSemanticTextProperty returns a SemanticTextProperty. +func NewSemanticTextProperty() *SemanticTextProperty { + r := &SemanticTextProperty{ + Meta: make(map[string]string, 0), + } + + return r +} diff --git a/typedapi/types/serialdifferencingaggregation.go b/typedapi/types/serialdifferencingaggregation.go index 576173f986..c34b28af25 100644 --- a/typedapi/types/serialdifferencingaggregation.go +++ b/typedapi/types/serialdifferencingaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // SerialDifferencingAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L361-L367 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L361-L367 type SerialDifferencingAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -45,9 +45,7 @@ type SerialDifferencingAggregation struct { GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` // Lag The historical bucket to subtract from the current value. // Must be a positive, non-zero integer. - Lag *int `json:"lag,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Lag *int `json:"lag,omitempty"` } func (s *SerialDifferencingAggregation) UnmarshalJSON(data []byte) error { @@ -89,7 +87,7 @@ func (s *SerialDifferencingAggregation) UnmarshalJSON(data []byte) error { case "lag": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,23 +101,6 @@ func (s *SerialDifferencingAggregation) UnmarshalJSON(data []byte) error { s.Lag = &f } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/serializedclusterstate.go b/typedapi/types/serializedclusterstate.go index 1ed7ef6d63..901ca7839e 100644 --- a/typedapi/types/serializedclusterstate.go +++ b/typedapi/types/serializedclusterstate.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SerializedClusterState type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L232-L238 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L232-L238 type SerializedClusterState struct { Diffs *SerializedClusterStateDetail `json:"diffs,omitempty"` // FullStates Number of published cluster states. diff --git a/typedapi/types/serializedclusterstatedetail.go b/typedapi/types/serializedclusterstatedetail.go index 16e24d1400..08347b2141 100644 --- a/typedapi/types/serializedclusterstatedetail.go +++ b/typedapi/types/serializedclusterstatedetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SerializedClusterStateDetail type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L240-L246 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L240-L246 type SerializedClusterStateDetail struct { CompressedSize *string `json:"compressed_size,omitempty"` CompressedSizeInBytes *int64 `json:"compressed_size_in_bytes,omitempty"` @@ -68,7 +68,7 @@ func (s *SerializedClusterStateDetail) UnmarshalJSON(data []byte) error { s.CompressedSize = &o case "compressed_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -83,7 +83,7 @@ func (s *SerializedClusterStateDetail) UnmarshalJSON(data []byte) error { } case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +110,7 @@ func (s *SerializedClusterStateDetail) UnmarshalJSON(data []byte) error { s.UncompressedSize = &o case "uncompressed_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/servicetoken.go b/typedapi/types/servicetoken.go index 3409dcb582..88bf0229c8 100644 --- a/typedapi/types/servicetoken.go +++ b/typedapi/types/servicetoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ServiceToken type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/create_service_token/types.ts#L22-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/create_service_token/types.ts#L22-L25 type ServiceToken struct { Name string `json:"name"` Value string `json:"value"` diff --git a/typedapi/types/setprocessor.go b/typedapi/types/setprocessor.go index bda8d2af8c..6095834363 100644 --- a/typedapi/types/setprocessor.go +++ b/typedapi/types/setprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SetProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L1023-L1057 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L1027-L1061 type SetProcessor struct { // CopyFrom The origin field which will be copied to `field`, cannot set `value` // simultaneously. 
@@ -121,7 +121,7 @@ func (s *SetProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_empty_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -135,7 +135,7 @@ func (s *SetProcessor) UnmarshalJSON(data []byte) error { } case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -166,7 +166,7 @@ func (s *SetProcessor) UnmarshalJSON(data []byte) error { } case "override": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/setsecurityuserprocessor.go b/typedapi/types/setsecurityuserprocessor.go index e6df0760e5..62f30760f9 100644 --- a/typedapi/types/setsecurityuserprocessor.go +++ b/typedapi/types/setsecurityuserprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SetSecurityUserProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L1059-L1068 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L1063-L1072 type SetSecurityUserProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -96,7 +96,7 @@ func (s *SetSecurityUserProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/settings.go b/typedapi/types/settings.go index 097005ba65..457d17b8f2 100644 --- a/typedapi/types/settings.go +++ b/typedapi/types/settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Settings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/_types/Transform.ts#L98-L144 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/_types/Transform.ts#L98-L144 type Settings struct { // AlignCheckpoints Specifies whether the transform checkpoint ranges should be optimized for // performance. 
Such optimization can align @@ -84,7 +84,7 @@ func (s *Settings) UnmarshalJSON(data []byte) error { switch t { case "align_checkpoints": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -98,7 +98,7 @@ func (s *Settings) UnmarshalJSON(data []byte) error { } case "dates_as_epoch_millis": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -112,7 +112,7 @@ func (s *Settings) UnmarshalJSON(data []byte) error { } case "deduce_mappings": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -126,7 +126,7 @@ func (s *Settings) UnmarshalJSON(data []byte) error { } case "docs_per_second": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -143,7 +143,7 @@ func (s *Settings) UnmarshalJSON(data []byte) error { case "max_page_search_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -158,7 +158,7 @@ func (s *Settings) UnmarshalJSON(data []byte) error { } case "unattended": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/settingsanalyze.go b/typedapi/types/settingsanalyze.go index b5675d8e86..3cfd868cc3 100644 --- a/typedapi/types/settingsanalyze.go +++ b/typedapi/types/settingsanalyze.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SettingsAnalyze type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L233-L236 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L235-L238 type SettingsAnalyze struct { MaxTokenCount Stringifiedinteger `json:"max_token_count,omitempty"` } diff --git a/typedapi/types/settingshighlight.go b/typedapi/types/settingshighlight.go index f4ddd4abfe..1f88f32ddb 100644 --- a/typedapi/types/settingshighlight.go +++ b/typedapi/types/settingshighlight.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SettingsHighlight type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L228-L231 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L230-L233 type SettingsHighlight struct { MaxAnalyzedOffset *int `json:"max_analyzed_offset,omitempty"` } @@ -53,7 +53,7 @@ func (s *SettingsHighlight) UnmarshalJSON(data []byte) error { case "max_analyzed_offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/settingsquerystring.go b/typedapi/types/settingsquerystring.go index 63b532bd64..351bccf1b7 100644 --- a/typedapi/types/settingsquerystring.go +++ b/typedapi/types/settingsquerystring.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SettingsQueryString type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L248-L250 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L250-L252 type SettingsQueryString struct { Lenient Stringifiedboolean `json:"lenient"` } diff --git a/typedapi/types/settingssearch.go b/typedapi/types/settingssearch.go index 640b19addc..45d61e5fa3 100644 --- a/typedapi/types/settingssearch.go +++ b/typedapi/types/settingssearch.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SettingsSearch type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L238-L241 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L240-L243 type SettingsSearch struct { Idle *SearchIdle `json:"idle,omitempty"` Slowlog *SlowlogSettings `json:"slowlog,omitempty"` diff --git a/typedapi/types/settingssimilarity.go b/typedapi/types/settingssimilarity.go index 7d55c52baf..479f4537eb 100644 --- a/typedapi/types/settingssimilarity.go +++ b/typedapi/types/settingssimilarity.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,5 +31,5 @@ package types // SettingsSimilarityLmj // SettingsSimilarityScripted // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L169-L180 -type SettingsSimilarity interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L171-L182 +type SettingsSimilarity any diff --git a/typedapi/types/settingssimilaritybm25.go b/typedapi/types/settingssimilaritybm25.go index da697fae0e..04d32758c2 100644 --- a/typedapi/types/settingssimilaritybm25.go +++ b/typedapi/types/settingssimilaritybm25.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SettingsSimilarityBm25 type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L186-L191 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L188-L193 type SettingsSimilarityBm25 struct { B *Float64 `json:"b,omitempty"` DiscountOverlaps *bool `json:"discount_overlaps,omitempty"` @@ -55,7 +55,7 @@ func (s *SettingsSimilarityBm25) UnmarshalJSON(data []byte) error { switch t { case "b": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *SettingsSimilarityBm25) UnmarshalJSON(data []byte) error { } case "discount_overlaps": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *SettingsSimilarityBm25) UnmarshalJSON(data []byte) error { } case "k1": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/settingssimilarityboolean.go b/typedapi/types/settingssimilarityboolean.go index ffacb10c66..047a7cd54e 100644 --- a/typedapi/types/settingssimilarityboolean.go +++ b/typedapi/types/settingssimilarityboolean.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // SettingsSimilarityBoolean type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L182-L184 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L184-L186 type SettingsSimilarityBoolean struct { Type string `json:"type,omitempty"` } diff --git a/typedapi/types/settingssimilaritydfi.go b/typedapi/types/settingssimilaritydfi.go index c4cf411032..126b0fa4b8 100644 --- a/typedapi/types/settingssimilaritydfi.go +++ b/typedapi/types/settingssimilaritydfi.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -28,7 +28,7 @@ import ( // SettingsSimilarityDfi type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L193-L196 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L195-L198 type SettingsSimilarityDfi struct { IndependenceMeasure dfiindependencemeasure.DFIIndependenceMeasure `json:"independence_measure"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/settingssimilaritydfr.go b/typedapi/types/settingssimilaritydfr.go index 6b2f352285..e0b5e88693 100644 --- a/typedapi/types/settingssimilaritydfr.go +++ b/typedapi/types/settingssimilaritydfr.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SettingsSimilarityDfr type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L198-L203 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L200-L205 type SettingsSimilarityDfr struct { AfterEffect dfraftereffect.DFRAfterEffect `json:"after_effect"` BasicModel dfrbasicmodel.DFRBasicModel `json:"basic_model"` diff --git a/typedapi/types/settingssimilarityib.go b/typedapi/types/settingssimilarityib.go index 5f0d51a0b7..b7671b2d8f 100644 --- a/typedapi/types/settingssimilarityib.go +++ b/typedapi/types/settingssimilarityib.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SettingsSimilarityIb type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L205-L210 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L207-L212 type SettingsSimilarityIb struct { Distribution ibdistribution.IBDistribution `json:"distribution"` Lambda iblambda.IBLambda `json:"lambda"` diff --git a/typedapi/types/settingssimilaritylmd.go b/typedapi/types/settingssimilaritylmd.go index feb2d31e83..e1c93ffeb1 100644 --- a/typedapi/types/settingssimilaritylmd.go +++ b/typedapi/types/settingssimilaritylmd.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SettingsSimilarityLmd type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L212-L215 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L214-L217 type SettingsSimilarityLmd struct { Mu *Float64 `json:"mu,omitempty"` Type string `json:"type,omitempty"` @@ -53,7 +53,7 @@ func (s *SettingsSimilarityLmd) UnmarshalJSON(data []byte) error { switch t { case "mu": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/settingssimilaritylmj.go b/typedapi/types/settingssimilaritylmj.go index 7ffebc4c3d..df55671436 100644 --- a/typedapi/types/settingssimilaritylmj.go +++ b/typedapi/types/settingssimilaritylmj.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SettingsSimilarityLmj type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L217-L220 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L219-L222 type SettingsSimilarityLmj struct { Lambda *Float64 `json:"lambda,omitempty"` Type string `json:"type,omitempty"` @@ -53,7 +53,7 @@ func (s *SettingsSimilarityLmj) UnmarshalJSON(data []byte) error { switch t { case "lambda": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/settingssimilarityscripted.go b/typedapi/types/settingssimilarityscripted.go index c5f71ae74c..5481b773a0 100644 --- a/typedapi/types/settingssimilarityscripted.go +++ b/typedapi/types/settingssimilarityscripted.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SettingsSimilarityScripted type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L222-L226 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L224-L228 type SettingsSimilarityScripted struct { Script Script `json:"script"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/shapefieldquery.go b/typedapi/types/shapefieldquery.go index 19df2cb09a..d399244147 100644 --- a/typedapi/types/shapefieldquery.go +++ b/typedapi/types/shapefieldquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // ShapeFieldQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L354-L367 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L366-L379 type ShapeFieldQuery struct { // IndexedShape Queries using a pre-indexed shape. IndexedShape *FieldLookup `json:"indexed_shape,omitempty"` diff --git a/typedapi/types/shapeproperty.go b/typedapi/types/shapeproperty.go index 877f4b8fb7..3dc9739163 100644 --- a/typedapi/types/shapeproperty.go +++ b/typedapi/types/shapeproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // ShapeProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/geo.ts#L73-L85 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/geo.ts#L73-L85 type ShapeProperty struct { Coerce *bool `json:"coerce,omitempty"` CopyTo []string `json:"copy_to,omitempty"` @@ -69,7 +69,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { switch t { case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,7 +99,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -124,7 +124,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -145,7 +145,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -235,12 +235,6 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -259,6 +253,18 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -415,6 +421,12 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -426,7 +438,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -441,7 +453,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -455,7 +467,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { } case "ignore_z_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -488,7 +500,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -509,7 +521,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { return err 
} s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -599,12 +611,6 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -623,6 +629,18 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -779,6 +797,12 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -801,7 +825,7 @@ func (s *ShapeProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shapequery.go b/typedapi/types/shapequery.go index 246581fd34..e625c1db37 100644 --- a/typedapi/types/shapequery.go +++ b/typedapi/types/shapequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShapeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/specialized.ts#L344-L352 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L351-L364 type ShapeQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -62,7 +62,7 @@ func (s *ShapeQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -78,7 +78,7 @@ func (s *ShapeQuery) UnmarshalJSON(data []byte) error { } case "ignore_unmapped": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *ShapeQuery) UnmarshalJSON(data []byte) error { func (s ShapeQuery) MarshalJSON() ([]byte, error) { type opt ShapeQuery // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/shardcommit.go b/typedapi/types/shardcommit.go index e161f0b527..8a93622492 100644 --- a/typedapi/types/shardcommit.go +++ b/typedapi/types/shardcommit.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardCommit type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L112-L117 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L112-L117 type ShardCommit struct { Generation int `json:"generation"` Id string `json:"id"` @@ -56,7 +56,7 @@ func (s *ShardCommit) UnmarshalJSON(data []byte) error { case "generation": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *ShardCommit) UnmarshalJSON(data []byte) error { } case "num_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardfailure.go b/typedapi/types/shardfailure.go index 634d34a869..524bd083bd 100644 --- a/typedapi/types/shardfailure.go +++ b/typedapi/types/shardfailure.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardFailure type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Errors.ts#L50-L56 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Errors.ts#L52-L58 type ShardFailure struct { Index *string `json:"index,omitempty"` Node *string `json:"node,omitempty"` @@ -79,7 +79,7 @@ func (s *ShardFailure) UnmarshalJSON(data []byte) error { case "shard": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardfilesizeinfo.go b/typedapi/types/shardfilesizeinfo.go index 70b8cd975f..2c895dd44b 100644 --- a/typedapi/types/shardfilesizeinfo.go +++ b/typedapi/types/shardfilesizeinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardFileSizeInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L124-L131 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L124-L131 type ShardFileSizeInfo struct { AverageSizeInBytes *int64 `json:"average_size_in_bytes,omitempty"` Count *int64 `json:"count,omitempty"` @@ -57,7 +57,7 @@ func (s *ShardFileSizeInfo) UnmarshalJSON(data []byte) error { switch t { case "average_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *ShardFileSizeInfo) UnmarshalJSON(data []byte) error { } case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,7 +99,7 @@ func (s *ShardFileSizeInfo) UnmarshalJSON(data []byte) error { s.Description = o case "max_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *ShardFileSizeInfo) UnmarshalJSON(data []byte) error { } case "min_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -129,7 +129,7 @@ func (s *ShardFileSizeInfo) UnmarshalJSON(data []byte) error { } case "size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardhealthstats.go b/typedapi/types/shardhealthstats.go index 7bf2f88736..f1b48148d3 100644 --- a/typedapi/types/shardhealthstats.go +++ b/typedapi/types/shardhealthstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // ShardHealthStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/health/types.ts#L36-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/health/types.ts#L36-L43 type ShardHealthStats struct { ActiveShards int `json:"active_shards"` InitializingShards int `json:"initializing_shards"` @@ -60,7 +60,7 @@ func (s *ShardHealthStats) UnmarshalJSON(data []byte) error { case "active_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -76,7 +76,7 @@ func (s *ShardHealthStats) UnmarshalJSON(data []byte) error { case "initializing_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *ShardHealthStats) UnmarshalJSON(data []byte) error { } case "primary_active": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,7 +106,7 @@ func (s *ShardHealthStats) UnmarshalJSON(data []byte) error { case "relocating_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -127,7 +127,7 @@ func (s *ShardHealthStats) UnmarshalJSON(data []byte) error { case "unassigned_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardlease.go b/typedapi/types/shardlease.go index 257f86a569..2916d5edc4 100644 --- a/typedapi/types/shardlease.go +++ b/typedapi/types/shardlease.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardLease type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L133-L138 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L133-L138 type ShardLease struct { Id string `json:"id"` RetainingSeqNo int64 `json:"retaining_seq_no"` @@ -77,7 +77,7 @@ func (s *ShardLease) UnmarshalJSON(data []byte) error { s.Source = o case "timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardmigrationstatus.go b/typedapi/types/shardmigrationstatus.go index 6c93afe9b0..2c1349043f 100644 --- a/typedapi/types/shardmigrationstatus.go +++ b/typedapi/types/shardmigrationstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // ShardMigrationStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L52-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/shutdown/get_node/ShutdownGetNodeResponse.ts#L52-L54 type ShardMigrationStatus struct { Status shutdownstatus.ShutdownStatus `json:"status"` } diff --git a/typedapi/types/shardpath.go b/typedapi/types/shardpath.go index 007448160c..038a31d729 100644 --- a/typedapi/types/shardpath.go +++ b/typedapi/types/shardpath.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardPath type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L140-L144 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L140-L144 type ShardPath struct { DataPath string `json:"data_path"` IsCustomDataPath bool `json:"is_custom_data_path"` @@ -66,7 +66,7 @@ func (s *ShardPath) UnmarshalJSON(data []byte) error { s.DataPath = o case "is_custom_data_path": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardprofile.go b/typedapi/types/shardprofile.go index 0741e5580a..312a668259 100644 --- a/typedapi/types/shardprofile.go +++ b/typedapi/types/shardprofile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardProfile type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/profile.ts#L132-L137 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/profile.ts#L132-L137 type ShardProfile struct { Aggregations []AggregationProfile `json:"aggregations"` Fetch *FetchProfile `json:"fetch,omitempty"` diff --git a/typedapi/types/shardquerycache.go b/typedapi/types/shardquerycache.go index 2da6f5a5f9..a26a31d5f8 100644 --- a/typedapi/types/shardquerycache.go +++ b/typedapi/types/shardquerycache.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardQueryCache type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L146-L154 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L146-L154 type ShardQueryCache struct { CacheCount int64 `json:"cache_count"` CacheSize int64 `json:"cache_size"` @@ -58,7 +58,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { switch t { case "cache_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -73,7 +73,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { } case "cache_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { } case "evictions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,7 +103,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { } case "hit_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +118,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { } case "memory_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -133,7 +133,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { } case "miss_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -148,7 +148,7 @@ func (s *ShardQueryCache) UnmarshalJSON(data []byte) error { } case "total_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardrecovery.go b/typedapi/types/shardrecovery.go index e3c4aa0227..f756934836 100644 --- a/typedapi/types/shardrecovery.go +++ b/typedapi/types/shardrecovery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardRecovery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/recovery/types.ts#L118-L135 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/recovery/types.ts#L118-L135 type ShardRecovery struct { Id int64 `json:"id"` Index RecoveryIndexStatus `json:"index"` @@ -67,7 +67,7 @@ func (s *ShardRecovery) UnmarshalJSON(data []byte) error { switch t { case "id": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *ShardRecovery) UnmarshalJSON(data []byte) error { } case "primary": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardretentionleases.go b/typedapi/types/shardretentionleases.go index c106004c37..4975a3da10 100644 --- a/typedapi/types/shardretentionleases.go +++ b/typedapi/types/shardretentionleases.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardRetentionLeases type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L156-L160 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L156-L160 type ShardRetentionLeases struct { Leases []ShardLease `json:"leases"` PrimaryTerm int64 `json:"primary_term"` @@ -59,7 +59,7 @@ func (s *ShardRetentionLeases) UnmarshalJSON(data []byte) error { } case "primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardrouting.go b/typedapi/types/shardrouting.go index 3c1d1e39bf..3ceb9a8098 100644 --- a/typedapi/types/shardrouting.go +++ b/typedapi/types/shardrouting.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,11 +33,11 @@ import ( // ShardRouting type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L162-L167 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L162-L167 type ShardRouting struct { Node string `json:"node"` Primary bool `json:"primary"` - RelocatingNode string `json:"relocating_node,omitempty"` + RelocatingNode *string `json:"relocating_node,omitempty"` State shardroutingstate.ShardRoutingState `json:"state"` } @@ -69,7 +69,7 @@ func (s *ShardRouting) UnmarshalJSON(data []byte) error { s.Node = o case "primary": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *ShardRouting) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.RelocatingNode = o + s.RelocatingNode = &o case "state": if err := dec.Decode(&s.State); err != nil { diff --git a/typedapi/types/shardsavailabilityindicator.go b/typedapi/types/shardsavailabilityindicator.go index 9f99eb3884..765b218696 100644 --- a/typedapi/types/shardsavailabilityindicator.go +++ b/typedapi/types/shardsavailabilityindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // ShardsAvailabilityIndicator type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L104-L108 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L104-L108 type ShardsAvailabilityIndicator struct { Details *ShardsAvailabilityIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` diff --git a/typedapi/types/shardsavailabilityindicatordetails.go b/typedapi/types/shardsavailabilityindicatordetails.go index b3f9bcbdc8..52f8e6eb85 100644 --- a/typedapi/types/shardsavailabilityindicatordetails.go +++ b/typedapi/types/shardsavailabilityindicatordetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardsAvailabilityIndicatorDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L109-L119 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L109-L119 type ShardsAvailabilityIndicatorDetails struct { CreatingPrimaries int64 `json:"creating_primaries"` InitializingPrimaries int64 `json:"initializing_primaries"` @@ -60,7 +60,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { switch t { case "creating_primaries": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { } case "initializing_primaries": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { } case "initializing_replicas": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -105,7 +105,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { } case "restarting_primaries": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -120,7 +120,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { } case "restarting_replicas": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -135,7 +135,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { } case "started_primaries": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { } case "started_replicas": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -165,7 +165,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { } case "unassigned_primaries": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -180,7 +180,7 @@ func (s *ShardsAvailabilityIndicatorDetails) UnmarshalJSON(data []byte) error { } case "unassigned_replicas": - 
var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardscapacityindicator.go b/typedapi/types/shardscapacityindicator.go index 6f16a6929c..8983e9f9ea 100644 --- a/typedapi/types/shardscapacityindicator.go +++ b/typedapi/types/shardscapacityindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // ShardsCapacityIndicator type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L171-L175 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L171-L175 type ShardsCapacityIndicator struct { Details *ShardsCapacityIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` diff --git a/typedapi/types/shardscapacityindicatordetails.go b/typedapi/types/shardscapacityindicatordetails.go index f01eb0d96d..8810663712 100644 --- a/typedapi/types/shardscapacityindicatordetails.go +++ b/typedapi/types/shardscapacityindicatordetails.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ShardsCapacityIndicatorDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L177-L180 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L177-L180 type ShardsCapacityIndicatorDetails struct { Data ShardsCapacityIndicatorTierDetail `json:"data"` Frozen ShardsCapacityIndicatorTierDetail `json:"frozen"` diff --git a/typedapi/types/shardscapacityindicatortierdetail.go b/typedapi/types/shardscapacityindicatortierdetail.go index 23125807b0..3a611e4225 100644 --- a/typedapi/types/shardscapacityindicatortierdetail.go +++ b/typedapi/types/shardscapacityindicatortierdetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardsCapacityIndicatorTierDetail type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L182-L185 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L182-L185 type ShardsCapacityIndicatorTierDetail struct { CurrentUsedShards *int `json:"current_used_shards,omitempty"` MaxShardsInCluster int `json:"max_shards_in_cluster"` @@ -54,7 +54,7 @@ func (s *ShardsCapacityIndicatorTierDetail) UnmarshalJSON(data []byte) error { case "current_used_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *ShardsCapacityIndicatorTierDetail) UnmarshalJSON(data []byte) error { case "max_shards_in_cluster": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardsegmentrouting.go b/typedapi/types/shardsegmentrouting.go index 8105beb223..bcc6d41f04 100644 --- a/typedapi/types/shardsegmentrouting.go +++ b/typedapi/types/shardsegmentrouting.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardSegmentRouting type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/segments/types.ts#L40-L44 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/segments/types.ts#L40-L44 type ShardSegmentRouting struct { Node string `json:"node"` Primary bool `json:"primary"` @@ -66,7 +66,7 @@ func (s *ShardSegmentRouting) UnmarshalJSON(data []byte) error { s.Node = o case "primary": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardsequencenumber.go b/typedapi/types/shardsequencenumber.go index 7170ee1304..9529838114 100644 --- a/typedapi/types/shardsequencenumber.go +++ b/typedapi/types/shardsequencenumber.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardSequenceNumber type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L176-L180 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L176-L180 type ShardSequenceNumber struct { GlobalCheckpoint int64 `json:"global_checkpoint"` LocalCheckpoint int64 `json:"local_checkpoint"` @@ -54,7 +54,7 @@ func (s *ShardSequenceNumber) UnmarshalJSON(data []byte) error { switch t { case "global_checkpoint": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -69,7 +69,7 @@ func (s *ShardSequenceNumber) UnmarshalJSON(data []byte) error { } case "local_checkpoint": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardsrecord.go b/typedapi/types/shardsrecord.go index 6ca7a162e8..b9848a450c 100644 --- a/typedapi/types/shardsrecord.go +++ b/typedapi/types/shardsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/shards/types.ts#L20-L421 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/shards/types.ts#L20-L421 type ShardsRecord struct { // BulkAvgSizeInBytes The average size in bytes of shard bulk operations. BulkAvgSizeInBytes *string `json:"bulk.avg_size_in_bytes,omitempty"` @@ -46,7 +46,7 @@ type ShardsRecord struct { // CompletionSize The size of completion. CompletionSize *string `json:"completion.size,omitempty"` // Docs The number of documents in the shard. - Docs string `json:"docs,omitempty"` + Docs *string `json:"docs,omitempty"` // FielddataEvictions The fielddata cache evictions. FielddataEvictions *string `json:"fielddata.evictions,omitempty"` // FielddataMemorySize The used fielddata cache memory. @@ -88,7 +88,7 @@ type ShardsRecord struct { // IndexingIndexTotal The number of indexing operations. IndexingIndexTotal *string `json:"indexing.index_total,omitempty"` // Ip The IP address of the node. - Ip string `json:"ip,omitempty"` + Ip *string `json:"ip,omitempty"` // MergesCurrent The number of current merge operations. MergesCurrent *string `json:"merges.current,omitempty"` // MergesCurrentDocs The number of current merging documents. @@ -104,7 +104,7 @@ type ShardsRecord struct { // MergesTotalTime The time spent merging documents. MergesTotalTime *string `json:"merges.total_time,omitempty"` // Node The name of node. - Node string `json:"node,omitempty"` + Node *string `json:"node,omitempty"` // PathData The shard data path. PathData *string `json:"path.data,omitempty"` // PathState The shard state path. @@ -174,7 +174,7 @@ type ShardsRecord struct { // `UNASSIGNED`: The shard is not assigned to any node. State *string `json:"state,omitempty"` // Store The disk space used by the shard. - Store string `json:"store,omitempty"` + Store *string `json:"store,omitempty"` // SyncId The sync identifier. 
SyncId *string `json:"sync_id,omitempty"` // UnassignedAt The time at which the shard became unassigned in Coordinated Universal Time @@ -324,7 +324,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Docs = o + s.Docs = &o case "fielddata.evictions", "fe", "fielddataEvictions": var tmp json.RawMessage @@ -576,7 +576,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Ip = o + s.Ip = &o case "merges.current", "mc", "mergesCurrent": var tmp json.RawMessage @@ -672,7 +672,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Node = o + s.Node = &o case "path.data", "pd", "dataPath": var tmp json.RawMessage @@ -1056,7 +1056,7 @@ func (s *ShardsRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Store = o + s.Store = &o case "sync_id": var tmp json.RawMessage diff --git a/typedapi/types/shardssegment.go b/typedapi/types/shardssegment.go index 84773face2..29f6875279 100644 --- a/typedapi/types/shardssegment.go +++ b/typedapi/types/shardssegment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardsSegment type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/segments/types.ts#L46-L51 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/segments/types.ts#L46-L51 type ShardsSegment struct { NumCommittedSegments int `json:"num_committed_segments"` NumSearchSegments int `json:"num_search_segments"` @@ -56,7 +56,7 @@ func (s *ShardsSegment) UnmarshalJSON(data []byte) error { case "num_committed_segments": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *ShardsSegment) UnmarshalJSON(data []byte) error { case "num_search_segments": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardsstatssummary.go b/typedapi/types/shardsstatssummary.go index 85e7dc0828..c00d8d4564 100644 --- a/typedapi/types/shardsstatssummary.go +++ b/typedapi/types/shardsstatssummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // ShardsStatsSummary type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotShardsStatus.ts#L29-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotShardsStatus.ts#L29-L35 type ShardsStatsSummary struct { Incremental ShardsStatsSummaryItem `json:"incremental"` StartTimeInMillis int64 `json:"start_time_in_millis"` diff --git a/typedapi/types/shardsstatssummaryitem.go b/typedapi/types/shardsstatssummaryitem.go index 193a21acdd..22d845e38b 100644 --- a/typedapi/types/shardsstatssummaryitem.go +++ b/typedapi/types/shardsstatssummaryitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardsStatsSummaryItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotShardsStatus.ts#L37-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotShardsStatus.ts#L37-L40 type ShardsStatsSummaryItem struct { FileCount int64 `json:"file_count"` SizeInBytes int64 `json:"size_in_bytes"` @@ -53,7 +53,7 @@ func (s *ShardsStatsSummaryItem) UnmarshalJSON(data []byte) error { switch t { case "file_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *ShardsStatsSummaryItem) UnmarshalJSON(data []byte) error { } case "size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shardstatistics.go b/typedapi/types/shardstatistics.go index 93c4bc4424..cb0a3bb71d 100644 --- a/typedapi/types/shardstatistics.go +++ b/typedapi/types/shardstatistics.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ShardStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L54-L66 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L54-L66 type ShardStatistics struct { Failed uint `json:"failed"` Failures []ShardFailure `json:"failures,omitempty"` diff --git a/typedapi/types/shardstore.go b/typedapi/types/shardstore.go index fd2271b2b4..151bf66f0d 100644 --- a/typedapi/types/shardstore.go +++ b/typedapi/types/shardstore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // ShardStore type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/shard_stores/types.ts#L30-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/shard_stores/types.ts#L30-L37 type ShardStore struct { Allocation shardstoreallocation.ShardStoreAllocation `json:"allocation"` AllocationId *string `json:"allocation_id,omitempty"` @@ -92,7 +92,7 @@ func (s *ShardStore) UnmarshalJSON(data []byte) error { func (s ShardStore) MarshalJSON() ([]byte, error) { type opt ShardStore // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/shardstoreexception.go b/typedapi/types/shardstoreexception.go index ffcca2517e..ea16f483ec 100644 --- a/typedapi/types/shardstoreexception.go +++ b/typedapi/types/shardstoreexception.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardStoreException type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/shard_stores/types.ts#L51-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/shard_stores/types.ts#L54-L57 type ShardStoreException struct { Reason string `json:"reason"` Type string `json:"type"` diff --git a/typedapi/types/shardstoreindex.go b/typedapi/types/shardstoreindex.go index d63880dd4d..2d53ac3325 100644 --- a/typedapi/types/shardstoreindex.go +++ b/typedapi/types/shardstoreindex.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ShardStoreIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search_shards/SearchShardsResponse.ts#L33-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search_shards/SearchShardsResponse.ts#L33-L36 type ShardStoreIndex struct { Aliases []string `json:"aliases,omitempty"` Filter *Query `json:"filter,omitempty"` diff --git a/typedapi/types/shardstorenode.go b/typedapi/types/shardstorenode.go index b069f0deb1..0ba8555016 100644 --- a/typedapi/types/shardstorenode.go +++ b/typedapi/types/shardstorenode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardStoreNode type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/shard_stores/types.ts#L36-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/shard_stores/types.ts#L39-L46 type ShardStoreNode struct { Attributes map[string]string `json:"attributes"` EphemeralId *string `json:"ephemeral_id,omitempty"` diff --git a/typedapi/types/shardstorewrapper.go b/typedapi/types/shardstorewrapper.go index 75606fe0e6..66a441760c 100644 --- a/typedapi/types/shardstorewrapper.go +++ b/typedapi/types/shardstorewrapper.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // ShardStoreWrapper type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/shard_stores/types.ts#L56-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/shard_stores/types.ts#L59-L61 type ShardStoreWrapper struct { Stores []ShardStore `json:"stores"` } diff --git a/typedapi/types/shardstotalstats.go b/typedapi/types/shardstotalstats.go index 0fb34183c1..a16972bf60 100644 --- a/typedapi/types/shardstotalstats.go +++ b/typedapi/types/shardstotalstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShardsTotalStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/stats/types.ts#L182-L184 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/stats/types.ts#L182-L184 type ShardsTotalStats struct { TotalCount int64 `json:"total_count"` } @@ -52,7 +52,7 @@ func (s *ShardsTotalStats) UnmarshalJSON(data []byte) error { switch t { case "total_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shared.go b/typedapi/types/shared.go index 82d94d5c00..d8e6970cee 100644 --- a/typedapi/types/shared.go +++ b/typedapi/types/shared.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Shared type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/searchable_snapshots/cache_stats/Response.ts#L34-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/searchable_snapshots/cache_stats/Response.ts#L34-L43 type Shared struct { BytesReadInBytes ByteSize `json:"bytes_read_in_bytes"` BytesWrittenInBytes ByteSize `json:"bytes_written_in_bytes"` @@ -69,7 +69,7 @@ func (s *Shared) UnmarshalJSON(data []byte) error { } case "evictions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *Shared) UnmarshalJSON(data []byte) error { case "num_regions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *Shared) UnmarshalJSON(data []byte) error { } case "reads": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *Shared) UnmarshalJSON(data []byte) error { } case "writes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/sharedfilesystemrepository.go b/typedapi/types/sharedfilesystemrepository.go index b4fbafdacc..af005ae2fc 100644 --- a/typedapi/types/sharedfilesystemrepository.go +++ b/typedapi/types/sharedfilesystemrepository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SharedFileSystemRepository type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L55-L58 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L55-L58 type SharedFileSystemRepository struct { Settings SharedFileSystemRepositorySettings `json:"settings"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/sharedfilesystemrepositorysettings.go b/typedapi/types/sharedfilesystemrepositorysettings.go index 1a7a48bc0c..864f66ce9c 100644 --- a/typedapi/types/sharedfilesystemrepositorysettings.go +++ b/typedapi/types/sharedfilesystemrepositorysettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SharedFileSystemRepositorySettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L104-L108 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L104-L108 type SharedFileSystemRepositorySettings struct { ChunkSize ByteSize `json:"chunk_size,omitempty"` Compress *bool `json:"compress,omitempty"` @@ -63,7 +63,7 @@ func (s *SharedFileSystemRepositorySettings) UnmarshalJSON(data []byte) error { } case "compress": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *SharedFileSystemRepositorySettings) UnmarshalJSON(data []byte) error { case "max_number_of_snapshots": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *SharedFileSystemRepositorySettings) UnmarshalJSON(data []byte) error { } case "readonly": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shingletokenfilter.go b/typedapi/types/shingletokenfilter.go index d6dffb0e5a..3a8279d04f 100644 --- a/typedapi/types/shingletokenfilter.go +++ b/typedapi/types/shingletokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShingleTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L87-L95 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L87-L95 type ShingleTokenFilter struct { FillerToken *string `json:"filler_token,omitempty"` MaxShingleSize string `json:"max_shingle_size,omitempty"` @@ -95,7 +95,7 @@ func (s *ShingleTokenFilter) UnmarshalJSON(data []byte) error { s.MinShingleSize = o case "output_unigrams": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -109,7 +109,7 @@ func (s *ShingleTokenFilter) UnmarshalJSON(data []byte) error { } case "output_unigrams_if_no_shingles": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shortnumberproperty.go b/typedapi/types/shortnumberproperty.go index 4d0221c664..b05f0fe743 100644 --- a/typedapi/types/shortnumberproperty.go +++ b/typedapi/types/shortnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // ShortNumberProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L159-L162 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L167-L170 type ShortNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -79,7 +79,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -171,7 +171,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -261,12 +261,6 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -285,6 +279,18 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -441,6 +447,12 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -452,7 +464,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -467,7 +479,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -481,7 +493,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +531,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := 
range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -540,7 +552,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -630,12 +642,6 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -654,6 +660,18 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -810,6 +828,12 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -868,7 +892,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -882,7 +906,7 @@ func (s *ShortNumberProperty) UnmarshalJSON(data []byte) error { } case "time_series_dimension": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/shrinkconfiguration.go b/typedapi/types/shrinkconfiguration.go index 9c4960d3d9..f0c800a9f9 100644 --- a/typedapi/types/shrinkconfiguration.go +++ b/typedapi/types/shrinkconfiguration.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ShrinkConfiguration type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/_types/Phase.ts#L60-L62 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/_types/Phase.ts#L60-L62 type ShrinkConfiguration struct { NumberOfShards int `json:"number_of_shards"` } @@ -53,7 +53,7 @@ func (s *ShrinkConfiguration) UnmarshalJSON(data []byte) error { case "number_of_shards": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/significantlongtermsaggregate.go b/typedapi/types/significantlongtermsaggregate.go index ac3855d313..fe9d03c614 100644 --- a/typedapi/types/significantlongtermsaggregate.go +++ b/typedapi/types/significantlongtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SignificantLongTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L588-L590 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L592-L594 type SignificantLongTermsAggregate struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantLongTermsBucket `json:"buckets"` @@ -55,7 +55,7 @@ func (s *SignificantLongTermsAggregate) UnmarshalJSON(data []byte) error { switch t { case "bg_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *SignificantLongTermsAggregate) UnmarshalJSON(data []byte) error { } case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/significantlongtermsbucket.go b/typedapi/types/significantlongtermsbucket.go index 712279ac3b..58c3c6d9a5 100644 --- a/typedapi/types/significantlongtermsbucket.go +++ b/typedapi/types/significantlongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // SignificantLongTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L597-L600 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L601-L604 type SignificantLongTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` BgCount int64 `json:"bg_count"` @@ -58,7 +58,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { switch t { case "bg_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -73,7 +73,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { } case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { } case "key": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { s.KeyAsString = &o case "score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -575,7 +575,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -625,7 +625,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -635,7 +635,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -652,7 +652,7 @@ func (s *SignificantLongTermsBucket) UnmarshalJSON(data []byte) error { func (s SignificantLongTermsBucket) MarshalJSON() ([]byte, error) { type opt SignificantLongTermsBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/significantstringtermsaggregate.go b/typedapi/types/significantstringtermsaggregate.go index c9f948cd8e..af8c0dadc8 100644 --- a/typedapi/types/significantstringtermsaggregate.go +++ b/typedapi/types/significantstringtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SignificantStringTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L602-L604 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L606-L608 type SignificantStringTermsAggregate struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantStringTermsBucket `json:"buckets"` @@ -55,7 +55,7 @@ func (s *SignificantStringTermsAggregate) UnmarshalJSON(data []byte) error { switch t { case "bg_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *SignificantStringTermsAggregate) UnmarshalJSON(data []byte) error { } case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/significantstringtermsbucket.go b/typedapi/types/significantstringtermsbucket.go index 78056cd61d..c4c4c5b13c 100644 --- a/typedapi/types/significantstringtermsbucket.go +++ b/typedapi/types/significantstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // SignificantStringTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L606-L608 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L610-L612 type SignificantStringTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` BgCount int64 `json:"bg_count"` @@ -57,7 +57,7 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { switch t { case "bg_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { } case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,7 +99,7 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { s.Key = o case "score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -559,7 +559,7 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -609,7 +609,7 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -619,7 +619,7 @@ func (s *SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -636,7 +636,7 @@ func (s 
*SignificantStringTermsBucket) UnmarshalJSON(data []byte) error { func (s SignificantStringTermsBucket) MarshalJSON() ([]byte, error) { type opt SignificantStringTermsBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/significanttermsaggregatebasesignificantlongtermsbucket.go b/typedapi/types/significanttermsaggregatebasesignificantlongtermsbucket.go index 9b62a1720e..e86588ccd2 100644 --- a/typedapi/types/significanttermsaggregatebasesignificantlongtermsbucket.go +++ b/typedapi/types/significanttermsaggregatebasesignificantlongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SignificantTermsAggregateBaseSignificantLongTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L581-L586 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L585-L590 type SignificantTermsAggregateBaseSignificantLongTermsBucket struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantLongTermsBucket `json:"buckets"` @@ -55,7 +55,7 @@ func (s *SignificantTermsAggregateBaseSignificantLongTermsBucket) UnmarshalJSON( switch t { case "bg_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *SignificantTermsAggregateBaseSignificantLongTermsBucket) UnmarshalJSON( } case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/significanttermsaggregatebasesignificantstringtermsbucket.go b/typedapi/types/significanttermsaggregatebasesignificantstringtermsbucket.go index bb5398b8c5..8cfbdd25c9 100644 --- a/typedapi/types/significanttermsaggregatebasesignificantstringtermsbucket.go +++ b/typedapi/types/significanttermsaggregatebasesignificantstringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SignificantTermsAggregateBaseSignificantStringTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L581-L586 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L585-L590 type SignificantTermsAggregateBaseSignificantStringTermsBucket struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsSignificantStringTermsBucket `json:"buckets"` @@ -55,7 +55,7 @@ func (s *SignificantTermsAggregateBaseSignificantStringTermsBucket) UnmarshalJSO switch t { case "bg_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *SignificantTermsAggregateBaseSignificantStringTermsBucket) UnmarshalJSO } case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/significanttermsaggregatebasevoid.go b/typedapi/types/significanttermsaggregatebasevoid.go index a8a54c5017..3e3ebb6716 100644 --- a/typedapi/types/significanttermsaggregatebasevoid.go +++ b/typedapi/types/significanttermsaggregatebasevoid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SignificantTermsAggregateBaseVoid type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L581-L586 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L585-L590 type SignificantTermsAggregateBaseVoid struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsVoid `json:"buckets"` @@ -55,7 +55,7 @@ func (s *SignificantTermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { switch t { case "bg_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -77,13 +77,13 @@ func (s *SignificantTermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { localDec := json.NewDecoder(source) switch rawMsg[0] { case '{': - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': - o := []interface{}{} + o := []any{} if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Buckets", err) } @@ -91,7 +91,7 @@ func (s *SignificantTermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { } case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/significanttermsaggregation.go b/typedapi/types/significanttermsaggregation.go index eb5d8a0073..49cd54caea 100644 --- a/typedapi/types/significanttermsaggregation.go +++ b/typedapi/types/significanttermsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // SignificantTermsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L770-L834 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L772-L836 type SignificantTermsAggregation struct { // BackgroundFilter A background filter that can be used to focus in on significant terms within // a narrower context, instead of the entire index. @@ -54,14 +54,12 @@ type SignificantTermsAggregation struct { // Include Terms to include. Include TermsInclude `json:"include,omitempty"` // Jlh Use JLH score as the significance score. - Jlh *EmptyObject `json:"jlh,omitempty"` - Meta Metadata `json:"meta,omitempty"` + Jlh *EmptyObject `json:"jlh,omitempty"` // MinDocCount Only return terms that are found in more than `min_doc_count` hits. MinDocCount *int64 `json:"min_doc_count,omitempty"` // MutualInformation Use mutual information as described in "Information Retrieval", Manning et // al., Chapter 13.5.1, as the significance score. MutualInformation *MutualInformationHeuristic `json:"mutual_information,omitempty"` - Name *string `json:"name,omitempty"` // Percentage A simple calculation of the number of documents in the foreground sample with // a term divided by the number of documents in the background with the term. Percentage *PercentageScoreHeuristic `json:"percentage,omitempty"` @@ -146,13 +144,8 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Jlh", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -171,18 +164,6 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "MutualInformation", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "percentage": if err := dec.Decode(&s.Percentage); err != nil { return fmt.Errorf("%s | %w", "Percentage", err) @@ -194,7 +175,7 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { } case "shard_min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -210,7 +191,7 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -226,7 +207,7 @@ func (s *SignificantTermsAggregation) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/significanttextaggregation.go b/typedapi/types/significanttextaggregation.go index 743b83724a..c19462973a 100644 --- a/typedapi/types/significanttextaggregation.go +++ b/typedapi/types/significanttextaggregation.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // SignificantTextAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L836-L908 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L838-L910 type SignificantTextAggregation struct { // BackgroundFilter A background filter that can be used to focus in on significant terms within // a narrower context, instead of the entire index. @@ -56,14 +56,12 @@ type SignificantTextAggregation struct { // Include Values to include. Include TermsInclude `json:"include,omitempty"` // Jlh Use JLH score as the significance score. - Jlh *EmptyObject `json:"jlh,omitempty"` - Meta Metadata `json:"meta,omitempty"` + Jlh *EmptyObject `json:"jlh,omitempty"` // MinDocCount Only return values that are found in more than `min_doc_count` hits. MinDocCount *int64 `json:"min_doc_count,omitempty"` // MutualInformation Use mutual information as described in "Information Retrieval", Manning et // al., Chapter 13.5.1, as the significance score. MutualInformation *MutualInformationHeuristic `json:"mutual_information,omitempty"` - Name *string `json:"name,omitempty"` // Percentage A simple calculation of the number of documents in the foreground sample with // a term divided by the number of documents in the background with the term. Percentage *PercentageScoreHeuristic `json:"percentage,omitempty"` @@ -136,7 +134,7 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { } case "filter_duplicate_text": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -164,13 +162,8 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Jlh", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -189,18 +182,6 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "MutualInformation", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "percentage": if err := dec.Decode(&s.Percentage); err != nil { return fmt.Errorf("%s | %w", "Percentage", err) @@ -212,7 +193,7 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { } case "shard_min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -228,7 +209,7 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -244,7 +225,7 @@ func (s *SignificantTextAggregation) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git 
a/typedapi/types/simpleanalyzer.go b/typedapi/types/simpleanalyzer.go index 8ee7e5336f..d26b60f8f9 100644 --- a/typedapi/types/simpleanalyzer.go +++ b/typedapi/types/simpleanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SimpleAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L83-L86 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L83-L86 type SimpleAnalyzer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` diff --git a/typedapi/types/simplemovingaverageaggregation.go b/typedapi/types/simplemovingaverageaggregation.go index 6f424cb9e9..b19e73c72c 100644 --- a/typedapi/types/simplemovingaverageaggregation.go +++ b/typedapi/types/simplemovingaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // SimpleMovingAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L247-L250 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L247-L250 type SimpleMovingAverageAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,10 +43,8 @@ type SimpleMovingAverageAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. 
GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` Minimize *bool `json:"minimize,omitempty"` Model string `json:"model,omitempty"` - Name *string `json:"name,omitempty"` Predict *int `json:"predict,omitempty"` Settings EmptyObject `json:"settings"` Window *int `json:"window,omitempty"` @@ -89,13 +87,8 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "minimize": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,21 +106,9 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Model", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "predict": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -148,7 +129,7 @@ func (s *SimpleMovingAverageAggregation) UnmarshalJSON(data []byte) error { case "window": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -174,10 +155,8 @@ func (s SimpleMovingAverageAggregation) MarshalJSON() ([]byte, error) { BucketsPath: s.BucketsPath, Format: s.Format, GapPolicy: s.GapPolicy, - Meta: s.Meta, Minimize: s.Minimize, Model: s.Model, - Name: s.Name, Predict: s.Predict, Settings: s.Settings, Window: s.Window, diff --git a/typedapi/types/simplequerystringflags.go b/typedapi/types/simplequerystringflags.go index 8d746b7f34..b3429f522e 100644 --- a/typedapi/types/simplequerystringflags.go +++ b/typedapi/types/simplequerystringflags.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SimpleQueryStringFlags type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L702-L706 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L702-L706 type SimpleQueryStringFlags PipeSeparatedFlagsSimpleQueryStringFlag diff --git a/typedapi/types/simplequerystringquery.go b/typedapi/types/simplequerystringquery.go index 31caf55801..f08f8f1907 100644 --- a/typedapi/types/simplequerystringquery.go +++ b/typedapi/types/simplequerystringquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // SimpleQueryStringQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/fulltext.ts#L765-L830 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/fulltext.ts#L765-L830 type SimpleQueryStringQuery struct { // AnalyzeWildcard If `true`, the query attempts to analyze wildcard terms in the query string. AnalyzeWildcard *bool `json:"analyze_wildcard,omitempty"` @@ -96,7 +96,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { switch t { case "analyze_wildcard": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -122,7 +122,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { s.Analyzer = &o case "auto_generate_synonyms_phrase_query": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -136,7 +136,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { } case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -168,7 +168,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case "fuzzy_max_expansions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -184,7 +184,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { case "fuzzy_prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -199,7 +199,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { } case "fuzzy_transpositions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -213,7 +213,7 @@ func (s *SimpleQueryStringQuery) UnmarshalJSON(data []byte) error { } case "lenient": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/simplevalueaggregate.go b/typedapi/types/simplevalueaggregate.go index 321aa0ca61..88280cf73f 100644 --- a/typedapi/types/simplevalueaggregate.go +++ b/typedapi/types/simplevalueaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // SimpleValueAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L224-L225 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L224-L225 type SimpleValueAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. 
- Value Float64 `json:"value,omitempty"` - ValueAsString *string `json:"value_as_string,omitempty"` + Value *Float64 `json:"value,omitempty"` + ValueAsString *string `json:"value_as_string,omitempty"` } func (s *SimpleValueAggregate) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/simulatedactions.go b/typedapi/types/simulatedactions.go index 9a1d557286..63f67b967a 100644 --- a/typedapi/types/simulatedactions.go +++ b/typedapi/types/simulatedactions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SimulatedActions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Action.ts#L96-L100 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Action.ts#L96-L100 type SimulatedActions struct { Actions []string `json:"actions"` All *SimulatedActions `json:"all,omitempty"` @@ -64,7 +64,7 @@ func (s *SimulatedActions) UnmarshalJSON(data []byte) error { } case "use_all": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/simulateingest.go b/typedapi/types/simulateingest.go index 918310d0dc..17b4a57ad2 100644 --- a/typedapi/types/simulateingest.go +++ b/typedapi/types/simulateingest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SimulateIngest type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/simulate/types.ts#L28-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/simulate/types.ts#L28-L31 type SimulateIngest struct { Pipeline *string `json:"pipeline,omitempty"` Timestamp DateTime `json:"timestamp"` diff --git a/typedapi/types/sizefield.go b/typedapi/types/sizefield.go index 042b6cd837..3a31905385 100644 --- a/typedapi/types/sizefield.go +++ b/typedapi/types/sizefield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SizeField type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/meta-fields.ts#L54-L56 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/meta-fields.ts#L54-L56 type SizeField struct { Enabled bool `json:"enabled"` } @@ -52,7 +52,7 @@ func (s *SizeField) UnmarshalJSON(data []byte) error { switch t { case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/slackaction.go b/typedapi/types/slackaction.go index 0c13abf263..76e9606507 100644 --- a/typedapi/types/slackaction.go +++ b/typedapi/types/slackaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SlackAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L91-L94 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L91-L94 type SlackAction struct { Account *string `json:"account,omitempty"` Message SlackMessage `json:"message"` diff --git a/typedapi/types/slackattachment.go b/typedapi/types/slackattachment.go index c44d27f400..3706e7eb31 100644 --- a/typedapi/types/slackattachment.go +++ b/typedapi/types/slackattachment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SlackAttachment type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L101-L117 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L101-L117 type SlackAttachment struct { AuthorIcon *string `json:"author_icon,omitempty"` AuthorLink *string `json:"author_link,omitempty"` diff --git a/typedapi/types/slackattachmentfield.go b/typedapi/types/slackattachmentfield.go index 9137defe37..412c5903d3 100644 --- a/typedapi/types/slackattachmentfield.go +++ b/typedapi/types/slackattachmentfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SlackAttachmentField type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L119-L123 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L119-L123 type SlackAttachmentField struct { Int bool `json:"short"` Title string `json:"title"` @@ -54,7 +54,7 @@ func (s *SlackAttachmentField) UnmarshalJSON(data []byte) error { switch t { case "short": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/slackdynamicattachment.go b/typedapi/types/slackdynamicattachment.go index d2fbf38c37..8d4247189b 100644 --- a/typedapi/types/slackdynamicattachment.go +++ b/typedapi/types/slackdynamicattachment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SlackDynamicAttachment type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L125-L128 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L125-L128 type SlackDynamicAttachment struct { AttachmentTemplate SlackAttachment `json:"attachment_template"` ListPath string `json:"list_path"` diff --git a/typedapi/types/slackmessage.go b/typedapi/types/slackmessage.go index 838fa4a4a2..124171f2ba 100644 --- a/typedapi/types/slackmessage.go +++ b/typedapi/types/slackmessage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SlackMessage type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L130-L137 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L130-L137 type SlackMessage struct { Attachments []SlackAttachment `json:"attachments"` DynamicAttachments *SlackDynamicAttachment `json:"dynamic_attachments,omitempty"` diff --git a/typedapi/types/slackresult.go b/typedapi/types/slackresult.go index 113149b313..f1aa31c067 100644 --- a/typedapi/types/slackresult.go +++ b/typedapi/types/slackresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SlackResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L96-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L96-L99 type SlackResult struct { Account *string `json:"account,omitempty"` Message SlackMessage `json:"message"` diff --git a/typedapi/types/slicedscroll.go b/typedapi/types/slicedscroll.go index 151cc5a1d2..99b42a7b25 100644 --- a/typedapi/types/slicedscroll.go +++ b/typedapi/types/slicedscroll.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SlicedScroll type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/SlicedScroll.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/SlicedScroll.ts#L23-L27 type SlicedScroll struct { Field *string `json:"field,omitempty"` Id string `json:"id"` @@ -65,7 +65,7 @@ func (s *SlicedScroll) UnmarshalJSON(data []byte) error { case "max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/slices.go b/typedapi/types/slices.go index 4e92d9fae4..f2ff5a3b20 100644 --- a/typedapi/types/slices.go +++ b/typedapi/types/slices.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // int // slicescalculation.SlicesCalculation // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L361-L366 -type Slices interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L364-L369 +type Slices any diff --git a/typedapi/types/slm.go b/typedapi/types/slm.go index 40cd5dbae7..179b98cc81 100644 --- a/typedapi/types/slm.go +++ b/typedapi/types/slm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Slm type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L449-L452 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L449-L452 type Slm struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -55,7 +55,7 @@ func (s *Slm) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -69,7 +69,7 @@ func (s *Slm) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,7 +84,7 @@ func (s *Slm) UnmarshalJSON(data []byte) error { case "policy_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/slmindicator.go b/typedapi/types/slmindicator.go index 80bbf73847..ab7e2e0d34 100644 --- a/typedapi/types/slmindicator.go +++ b/typedapi/types/slmindicator.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // SlmIndicator type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L155-L159 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L155-L159 type SlmIndicator struct { Details *SlmIndicatorDetails `json:"details,omitempty"` Diagnosis []Diagnosis `json:"diagnosis,omitempty"` diff --git a/typedapi/types/slmindicatordetails.go b/typedapi/types/slmindicatordetails.go index 20dfdaa6e4..0a20fc996f 100644 --- a/typedapi/types/slmindicatordetails.go +++ b/typedapi/types/slmindicatordetails.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // SlmIndicatorDetails type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L160-L164 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L160-L164 type SlmIndicatorDetails struct { Policies int64 `json:"policies"` SlmStatus lifecycleoperationmode.LifecycleOperationMode `json:"slm_status"` @@ -56,7 +56,7 @@ func (s *SlmIndicatorDetails) UnmarshalJSON(data []byte) error { switch t { case "policies": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/slmindicatorunhealthypolicies.go b/typedapi/types/slmindicatorunhealthypolicies.go index 8258847212..7509eb6756 100644 --- a/typedapi/types/slmindicatorunhealthypolicies.go +++ b/typedapi/types/slmindicatorunhealthypolicies.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SlmIndicatorUnhealthyPolicies type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/health_report/types.ts#L166-L169 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/health_report/types.ts#L166-L169 type SlmIndicatorUnhealthyPolicies struct { Count int64 `json:"count"` InvocationsSinceLastSuccess map[string]int64 `json:"invocations_since_last_success,omitempty"` @@ -53,7 +53,7 @@ func (s *SlmIndicatorUnhealthyPolicies) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/slmpolicy.go b/typedapi/types/slmpolicy.go index 4b410cd20b..2b89fe1793 100644 --- a/typedapi/types/slmpolicy.go +++ b/typedapi/types/slmpolicy.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SLMPolicy type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/_types/SnapshotLifecycle.ts#L76-L82 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/_types/SnapshotLifecycle.ts#L76-L82 type SLMPolicy struct { Config *Configuration `json:"config,omitempty"` Name string `json:"name"` diff --git a/typedapi/types/slowlogsettings.go b/typedapi/types/slowlogsettings.go index 4c113c0a07..214e43ada9 100644 --- a/typedapi/types/slowlogsettings.go +++ b/typedapi/types/slowlogsettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SlowlogSettings type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L479-L484 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L490-L495 type SlowlogSettings struct { Level *string `json:"level,omitempty"` Reformat *bool `json:"reformat,omitempty"` @@ -67,7 +67,7 @@ func (s *SlowlogSettings) UnmarshalJSON(data []byte) error { s.Level = &o case "reformat": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *SlowlogSettings) UnmarshalJSON(data []byte) error { case "source": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/slowlogtresholdlevels.go b/typedapi/types/slowlogtresholdlevels.go index b57828af96..2f79320cf1 100644 --- a/typedapi/types/slowlogtresholdlevels.go +++ b/typedapi/types/slowlogtresholdlevels.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SlowlogTresholdLevels type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L491-L496 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L502-L507 type SlowlogTresholdLevels struct { Debug Duration `json:"debug,omitempty"` Info Duration `json:"info,omitempty"` diff --git a/typedapi/types/slowlogtresholds.go b/typedapi/types/slowlogtresholds.go index 1b7b727d37..def870d41a 100644 --- a/typedapi/types/slowlogtresholds.go +++ b/typedapi/types/slowlogtresholds.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SlowlogTresholds type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L486-L489 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L497-L500 type SlowlogTresholds struct { Fetch *SlowlogTresholdLevels `json:"fetch,omitempty"` Query *SlowlogTresholdLevels `json:"query,omitempty"` diff --git a/typedapi/types/smoothingmodelcontainer.go b/typedapi/types/smoothingmodelcontainer.go index 2e8ade4f80..800f23814a 100644 --- a/typedapi/types/smoothingmodelcontainer.go +++ b/typedapi/types/smoothingmodelcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SmoothingModelContainer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L442-L458 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L445-L461 type SmoothingModelContainer struct { // Laplace A smoothing model that uses an additive smoothing where a constant (typically // `1.0` or smaller) is added to all counts to balance weights. diff --git a/typedapi/types/snapshotindexstats.go b/typedapi/types/snapshotindexstats.go index 541374da47..3c6baf9afe 100644 --- a/typedapi/types/snapshotindexstats.go +++ b/typedapi/types/snapshotindexstats.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SnapshotIndexStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotIndexStats.ts#L25-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotIndexStats.ts#L25-L29 type SnapshotIndexStats struct { Shards map[string]SnapshotShardsStatus `json:"shards"` ShardsStats SnapshotShardsStats `json:"shards_stats"` diff --git a/typedapi/types/snapshotinfo.go b/typedapi/types/snapshotinfo.go index f7ce792e69..a8a220dde8 100644 --- a/typedapi/types/snapshotinfo.go +++ b/typedapi/types/snapshotinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SnapshotInfo type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotInfo.ts#L41-L71 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotInfo.ts#L41-L71 type SnapshotInfo struct { DataStreams []string `json:"data_streams"` Duration Duration `json:"duration,omitempty"` @@ -107,7 +107,7 @@ func (s *SnapshotInfo) UnmarshalJSON(data []byte) error { } case "include_global_state": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/snapshotlifecycle.go b/typedapi/types/snapshotlifecycle.go index 768e0b670f..31696af148 100644 --- a/typedapi/types/snapshotlifecycle.go +++ b/typedapi/types/snapshotlifecycle.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SnapshotLifecycle type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/_types/SnapshotLifecycle.ts#L38-L49 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/_types/SnapshotLifecycle.ts#L38-L49 type SnapshotLifecycle struct { InProgress *InProgress `json:"in_progress,omitempty"` LastFailure *Invocation `json:"last_failure,omitempty"` diff --git a/typedapi/types/snapshotresponseitem.go b/typedapi/types/snapshotresponseitem.go index 8ac99a5a0f..3ca7844a48 100644 --- a/typedapi/types/snapshotresponseitem.go +++ b/typedapi/types/snapshotresponseitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SnapshotResponseItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/get/SnapshotGetResponse.ts#L44-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/get/SnapshotGetResponse.ts#L44-L48 type SnapshotResponseItem struct { Error *ErrorCause `json:"error,omitempty"` Repository string `json:"repository"` diff --git a/typedapi/types/snapshotrestore.go b/typedapi/types/snapshotrestore.go index 7ea1c5a38d..0676a621d2 100644 --- a/typedapi/types/snapshotrestore.go +++ b/typedapi/types/snapshotrestore.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SnapshotRestore type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/restore/SnapshotRestoreResponse.ts#L27-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/restore/SnapshotRestoreResponse.ts#L27-L31 type SnapshotRestore struct { Indices []string `json:"indices"` Shards ShardStatistics `json:"shards"` diff --git a/typedapi/types/snapshotshardfailure.go b/typedapi/types/snapshotshardfailure.go index bcb2b5ebbc..3ca8785c70 100644 --- a/typedapi/types/snapshotshardfailure.go +++ b/typedapi/types/snapshotshardfailure.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SnapshotShardFailure type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotShardFailure.ts#L22-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotShardFailure.ts#L22-L28 type SnapshotShardFailure struct { Index string `json:"index"` NodeId *string `json:"node_id,omitempty"` diff --git a/typedapi/types/snapshotshardsstats.go b/typedapi/types/snapshotshardsstats.go index 5bebd0998c..55c915a88e 100644 --- a/typedapi/types/snapshotshardsstats.go +++ b/typedapi/types/snapshotshardsstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SnapshotShardsStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotShardsStats.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotShardsStats.ts#L22-L29 type SnapshotShardsStats struct { Done int64 `json:"done"` Failed int64 `json:"failed"` @@ -57,7 +57,7 @@ func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { switch t { case "done": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { } case "failed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { } case "finalizing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -102,7 +102,7 @@ func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { } case "initializing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +117,7 @@ func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { } case "started": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -132,7 +132,7 @@ func (s *SnapshotShardsStats) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/snapshotshardsstatus.go b/typedapi/types/snapshotshardsstatus.go index 394ea2e2b0..5d30e6a82e 100644 --- a/typedapi/types/snapshotshardsstatus.go +++ b/typedapi/types/snapshotshardsstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // SnapshotShardsStatus type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotShardsStatus.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotShardsStatus.ts#L24-L27 type SnapshotShardsStatus struct { Stage shardsstatsstage.ShardsStatsStage `json:"stage"` Stats ShardsStatsSummary `json:"stats"` diff --git a/typedapi/types/snapshotsrecord.go b/typedapi/types/snapshotsrecord.go index be2df91c63..abe53c0810 100644 --- a/typedapi/types/snapshotsrecord.go +++ b/typedapi/types/snapshotsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SnapshotsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/snapshots/types.ts#L24-L96 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/snapshots/types.ts#L24-L96 type SnapshotsRecord struct { // Duration The time it took the snapshot process to complete, in time units. Duration Duration `json:"duration,omitempty"` diff --git a/typedapi/types/snapshotstats.go b/typedapi/types/snapshotstats.go index 34b2113aed..91fe7d411c 100644 --- a/typedapi/types/snapshotstats.go +++ b/typedapi/types/snapshotstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SnapshotStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotStats.ts#L23-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotStats.ts#L23-L29 type SnapshotStats struct { Incremental FileCountSnapshotStats `json:"incremental"` StartTimeInMillis int64 `json:"start_time_in_millis"` diff --git a/typedapi/types/snowballanalyzer.go b/typedapi/types/snowballanalyzer.go index d5e3353b31..c302ddef9b 100644 --- a/typedapi/types/snowballanalyzer.go +++ b/typedapi/types/snowballanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // SnowballAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L88-L93 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L88-L93 type SnowballAnalyzer struct { Language snowballlanguage.SnowballLanguage `json:"language"` Stopwords []string `json:"stopwords,omitempty"` diff --git a/typedapi/types/snowballtokenfilter.go b/typedapi/types/snowballtokenfilter.go index 055cda0899..90a4cb73ff 100644 --- a/typedapi/types/snowballtokenfilter.go +++ b/typedapi/types/snowballtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // SnowballTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L309-L312 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L311-L314 type SnowballTokenFilter struct { Language snowballlanguage.SnowballLanguage `json:"language"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/softdeletes.go b/typedapi/types/softdeletes.go index 99c85c7ac9..883cfad8c1 100644 --- a/typedapi/types/softdeletes.go +++ b/typedapi/types/softdeletes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SoftDeletes type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L50-L63 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L50-L63 type SoftDeletes struct { // Enabled Indicates whether soft deletes are enabled on the index. Enabled *bool `json:"enabled,omitempty"` @@ -61,7 +61,7 @@ func (s *SoftDeletes) UnmarshalJSON(data []byte) error { switch t { case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/sort.go b/typedapi/types/sort.go index bd8999bfed..bc4ce5a582 100644 --- a/typedapi/types/sort.go +++ b/typedapi/types/sort.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Sort type alias. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/sort.ts#L99-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/sort.ts#L104-L104 type Sort []SortCombinations diff --git a/typedapi/types/sortcombinations.go b/typedapi/types/sortcombinations.go index 9b77d69db3..c07e1289ef 100644 --- a/typedapi/types/sortcombinations.go +++ b/typedapi/types/sortcombinations.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // SortOptions // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/sort.ts#L93-L97 -type SortCombinations interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/sort.ts#L98-L102 +type SortCombinations any diff --git a/typedapi/types/sortoptions.go b/typedapi/types/sortoptions.go index 93c604d589..c2f4f26951 100644 --- a/typedapi/types/sortoptions.go +++ b/typedapi/types/sortoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -27,7 +27,7 @@ import ( // SortOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/sort.ts#L82-L91 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/sort.ts#L86-L96 type SortOptions struct { Doc_ *ScoreSort `json:"_doc,omitempty"` GeoDistance_ *GeoDistanceSort `json:"_geo_distance,omitempty"` @@ -40,7 +40,7 @@ type SortOptions struct { func (s SortOptions) MarshalJSON() ([]byte, error) { type opt SortOptions // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/sortprocessor.go b/typedapi/types/sortprocessor.go index 7f0296a3a7..b7c63786b2 100644 --- a/typedapi/types/sortprocessor.go +++ b/typedapi/types/sortprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // SortProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L1075-L1091 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L1079-L1095 type SortProcessor struct { // Description Description of the processor. 
// Useful for describing the purpose of the processor or its configuration. @@ -102,7 +102,7 @@ func (s *SortProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/sourceconfig.go b/typedapi/types/sourceconfig.go index 5b6e453ada..d908f95f72 100644 --- a/typedapi/types/sourceconfig.go +++ b/typedapi/types/sourceconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // bool // SourceFilter // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/SourceFilter.ts#L33-L37 -type SourceConfig interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/SourceFilter.ts#L33-L37 +type SourceConfig any diff --git a/typedapi/types/sourceconfigparam.go b/typedapi/types/sourceconfigparam.go index 4457be01ac..65bd250b22 100644 --- a/typedapi/types/sourceconfigparam.go +++ b/typedapi/types/sourceconfigparam.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // bool // []string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/SourceFilter.ts#L39-L45 -type SourceConfigParam interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/SourceFilter.ts#L39-L45 +type SourceConfigParam any diff --git a/typedapi/types/sourcefield.go b/typedapi/types/sourcefield.go index e0988d8334..07b8191034 100644 --- a/typedapi/types/sourcefield.go +++ b/typedapi/types/sourcefield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // SourceField type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/meta-fields.ts#L58-L65 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/meta-fields.ts#L58-L65 type SourceField struct { Compress *bool `json:"compress,omitempty"` CompressThreshold *string `json:"compress_threshold,omitempty"` @@ -59,7 +59,7 @@ func (s *SourceField) UnmarshalJSON(data []byte) error { switch t { case "compress": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *SourceField) UnmarshalJSON(data []byte) error { s.CompressThreshold = &o case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/sourcefilter.go b/typedapi/types/sourcefilter.go index 98894d9b8b..d4aac50625 100644 --- a/typedapi/types/sourcefilter.go +++ b/typedapi/types/sourcefilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SourceFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/SourceFilter.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/SourceFilter.ts#L23-L31 type SourceFilter struct { Excludes []string `json:"excludes,omitempty"` Includes []string `json:"includes,omitempty"` diff --git a/typedapi/types/sourceonlyrepository.go b/typedapi/types/sourceonlyrepository.go index 3322b6487d..129fd86f74 100644 --- a/typedapi/types/sourceonlyrepository.go +++ b/typedapi/types/sourceonlyrepository.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SourceOnlyRepository type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L65-L68 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L65-L68 type SourceOnlyRepository struct { Settings SourceOnlyRepositorySettings `json:"settings"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/sourceonlyrepositorysettings.go b/typedapi/types/sourceonlyrepositorysettings.go index 668157f83f..6b08325791 100644 --- a/typedapi/types/sourceonlyrepositorysettings.go +++ b/typedapi/types/sourceonlyrepositorysettings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SourceOnlyRepositorySettings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotRepository.ts#L117-L124 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotRepository.ts#L117-L124 type SourceOnlyRepositorySettings struct { ChunkSize ByteSize `json:"chunk_size,omitempty"` Compress *bool `json:"compress,omitempty"` @@ -63,7 +63,7 @@ func (s *SourceOnlyRepositorySettings) UnmarshalJSON(data []byte) error { } case "compress": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *SourceOnlyRepositorySettings) UnmarshalJSON(data []byte) error { case "max_number_of_snapshots": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *SourceOnlyRepositorySettings) UnmarshalJSON(data []byte) error { } case "read_only", "readonly": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/spancontainingquery.go b/typedapi/types/spancontainingquery.go index 5d88cceffa..f288de8c70 100644 --- a/typedapi/types/spancontainingquery.go +++ b/typedapi/types/spancontainingquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SpanContainingQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/span.ts#L25-L36 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/span.ts#L25-L36 type SpanContainingQuery struct { // Big Can be any span query. // Matching spans from `big` that contain matches from `little` are returned. @@ -69,7 +69,7 @@ func (s *SpanContainingQuery) UnmarshalJSON(data []byte) error { } case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/spanfieldmaskingquery.go b/typedapi/types/spanfieldmaskingquery.go index 56472ef841..4cc68e3963 100644 --- a/typedapi/types/spanfieldmaskingquery.go +++ b/typedapi/types/spanfieldmaskingquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SpanFieldMaskingQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/span.ts#L38-L41 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/span.ts#L38-L41 type SpanFieldMaskingQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -60,7 +60,7 @@ func (s *SpanFieldMaskingQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/spanfirstquery.go b/typedapi/types/spanfirstquery.go index 743dc7ff80..1d5e45ca3f 100644 --- a/typedapi/types/spanfirstquery.go +++ b/typedapi/types/spanfirstquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SpanFirstQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/span.ts#L43-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/span.ts#L43-L52 type SpanFirstQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -62,7 +62,7 @@ func (s *SpanFirstQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -79,7 +79,7 @@ func (s *SpanFirstQuery) UnmarshalJSON(data []byte) error { case "end": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/spangapquery.go b/typedapi/types/spangapquery.go index 0b5181e9f9..0ebb6e9afa 100644 --- a/typedapi/types/spangapquery.go +++ b/typedapi/types/spangapquery.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SpanGapQuery type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/span.ts#L54-L56 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/span.ts#L54-L56 type SpanGapQuery map[string]int diff --git a/typedapi/types/spanmultitermquery.go b/typedapi/types/spanmultitermquery.go index 3f94afca27..4c38c8c02d 100644 --- a/typedapi/types/spanmultitermquery.go +++ b/typedapi/types/spanmultitermquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SpanMultiTermQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/span.ts#L58-L63 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/span.ts#L58-L63 type SpanMultiTermQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -61,7 +61,7 @@ func (s *SpanMultiTermQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/spannearquery.go b/typedapi/types/spannearquery.go index f55cc888b7..f1320ccff7 100644 --- a/typedapi/types/spannearquery.go +++ b/typedapi/types/spannearquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SpanNearQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/span.ts#L65-L78 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/span.ts#L65-L78 type SpanNearQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -64,7 +64,7 @@ func (s *SpanNearQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *SpanNearQuery) UnmarshalJSON(data []byte) error { } case "in_order": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -112,7 +112,7 @@ func (s *SpanNearQuery) UnmarshalJSON(data []byte) error { case "slop": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/spannotquery.go b/typedapi/types/spannotquery.go index 6668b45d5a..165f0acf23 100644 --- a/typedapi/types/spannotquery.go +++ b/typedapi/types/spannotquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SpanNotQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/span.ts#L80-L104 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/span.ts#L80-L104 type SpanNotQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. 
@@ -72,7 +72,7 @@ func (s *SpanNotQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *SpanNotQuery) UnmarshalJSON(data []byte) error { case "dist": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *SpanNotQuery) UnmarshalJSON(data []byte) error { case "post": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -131,7 +131,7 @@ func (s *SpanNotQuery) UnmarshalJSON(data []byte) error { case "pre": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/spanorquery.go b/typedapi/types/spanorquery.go index eb027b8a81..d6893717cf 100644 --- a/typedapi/types/spanorquery.go +++ b/typedapi/types/spanorquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SpanOrQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/span.ts#L106-L111 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/span.ts#L106-L111 type SpanOrQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -60,7 +60,7 @@ func (s *SpanOrQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/spanquery.go b/typedapi/types/spanquery.go index ac85cabd12..33bed0ba30 100644 --- a/typedapi/types/spanquery.go +++ b/typedapi/types/spanquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,13 +30,13 @@ import ( // SpanQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/span.ts#L131-L170 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/span.ts#L131-L173 type SpanQuery struct { - // FieldMaskingSpan Allows queries like `span_near` or `span_or` across different fields. - FieldMaskingSpan *SpanFieldMaskingQuery `json:"field_masking_span,omitempty"` // SpanContaining Accepts a list of span queries, but only returns those spans which also match // a second span query. SpanContaining *SpanContainingQuery `json:"span_containing,omitempty"` + // SpanFieldMasking Allows queries like `span_near` or `span_or` across different fields. + SpanFieldMasking *SpanFieldMaskingQuery `json:"span_field_masking,omitempty"` // SpanFirst Accepts another span query whose matches must appear within the first N // positions of the field. 
SpanFirst *SpanFirstQuery `json:"span_first,omitempty"` @@ -73,16 +73,16 @@ func (s *SpanQuery) UnmarshalJSON(data []byte) error { switch t { - case "field_masking_span": - if err := dec.Decode(&s.FieldMaskingSpan); err != nil { - return fmt.Errorf("%s | %w", "FieldMaskingSpan", err) - } - case "span_containing": if err := dec.Decode(&s.SpanContaining); err != nil { return fmt.Errorf("%s | %w", "SpanContaining", err) } + case "span_field_masking": + if err := dec.Decode(&s.SpanFieldMasking); err != nil { + return fmt.Errorf("%s | %w", "SpanFieldMasking", err) + } + case "span_first": if err := dec.Decode(&s.SpanFirst); err != nil { return fmt.Errorf("%s | %w", "SpanFirst", err) diff --git a/typedapi/types/spantermquery.go b/typedapi/types/spantermquery.go index 42fa31956d..6e394b4259 100644 --- a/typedapi/types/spantermquery.go +++ b/typedapi/types/spantermquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SpanTermQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/span.ts#L113-L116 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/span.ts#L113-L116 type SpanTermQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -68,7 +68,7 @@ func (s *SpanTermQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/spanwithinquery.go b/typedapi/types/spanwithinquery.go index d4fed3e651..d54bee6705 100644 --- a/typedapi/types/spanwithinquery.go +++ b/typedapi/types/spanwithinquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SpanWithinQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/span.ts#L118-L129 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/span.ts#L118-L129 type SpanWithinQuery struct { // Big Can be any span query. // Matching spans from `little` that are enclosed within `big` are returned. @@ -69,7 +69,7 @@ func (s *SpanWithinQuery) UnmarshalJSON(data []byte) error { } case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/sparseembeddingresult.go b/typedapi/types/sparseembeddingresult.go index f1b56480f0..8169a24b74 100644 --- a/typedapi/types/sparseembeddingresult.go +++ b/typedapi/types/sparseembeddingresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SparseEmbeddingResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/_types/Results.ts#L35-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/_types/Results.ts#L36-L38 type SparseEmbeddingResult struct { Embedding SparseVector `json:"embedding"` } diff --git a/typedapi/types/sparsevector.go b/typedapi/types/sparsevector.go index bd029b50b0..0f98e37145 100644 --- a/typedapi/types/sparsevector.go +++ b/typedapi/types/sparsevector.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SparseVector type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/_types/Results.ts#L23-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/_types/Results.ts#L24-L28 type SparseVector map[string]float32 diff --git a/typedapi/types/sparsevectorproperty.go b/typedapi/types/sparsevectorproperty.go index d0cdb423fe..4847bcf6e0 100644 --- a/typedapi/types/sparsevectorproperty.go +++ b/typedapi/types/sparsevectorproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // SparseVectorProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L194-L196 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L202-L204 type SparseVectorProperty struct { Dynamic *dynamicmapping.DynamicMapping `json:"dynamic,omitempty"` Fields map[string]Property `json:"fields,omitempty"` @@ -71,7 +71,7 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -92,7 +92,7 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -182,12 +182,6 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -206,6 +200,18 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -362,6 +368,12 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -373,7 +385,7 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -402,7 +414,7 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -423,7 +435,7 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -513,12 +525,6 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -537,6 +543,18 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + 
oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -693,6 +711,12 @@ func (s *SparseVectorProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { diff --git a/typedapi/types/sparsevectorquery.go b/typedapi/types/sparsevectorquery.go new file mode 100644 index 0000000000..3cfea1ac1f --- /dev/null +++ b/typedapi/types/sparsevectorquery.go @@ -0,0 +1,179 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// SparseVectorQuery type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/SparseVectorQuery.ts#L26-L79 +type SparseVectorQuery struct { + // Boost Floating point number used to decrease or increase the relevance scores of + // the query. + // Boost values are relative to the default value of 1.0. + // A boost value between 0 and 1.0 decreases the relevance score. + // A value greater than 1.0 increases the relevance score. + Boost *float32 `json:"boost,omitempty"` + // Field The name of the field that contains the token-weight pairs to be searched + // against. + // This field must be a mapped sparse_vector field. + Field string `json:"field"` + // InferenceId The inference ID to use to convert the query text into token-weight pairs. + // It must be the same inference ID that was used to create the tokens from the + // input text. + // Only one of inference_id and query_vector is allowed. + // If inference_id is specified, query must also be specified. + // Only one of inference_id or query_vector may be supplied in a request. + InferenceId *string `json:"inference_id,omitempty"` + // Prune Whether to perform pruning, omitting the non-significant tokens from the + // query to improve query performance. + // If prune is true but the pruning_config is not specified, pruning will occur + // but default values will be used. + // Default: false + Prune *bool `json:"prune,omitempty"` + // PruningConfig Optional pruning configuration. 
+ // If enabled, this will omit non-significant tokens from the query in order to + // improve query performance. + // This is only used if prune is set to true. + // If prune is set to true but pruning_config is not specified, default values + // will be used. + PruningConfig *TokenPruningConfig `json:"pruning_config,omitempty"` + // Query The query text you want to use for search. + // If inference_id is specified, query must also be specified. + Query *string `json:"query,omitempty"` + QueryName_ *string `json:"_name,omitempty"` + // QueryVector Dictionary of precomputed sparse vectors and their associated weights. + // Only one of inference_id or query_vector may be supplied in a request. + QueryVector map[string]float32 `json:"query_vector,omitempty"` +} + +func (s *SparseVectorQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "Boost", err) + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return fmt.Errorf("%s | %w", "Field", err) + } + + case "inference_id": + if err := dec.Decode(&s.InferenceId); err != nil { + return fmt.Errorf("%s | %w", "InferenceId", err) + } + + case "prune": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseBool(v) + if err != nil { + return fmt.Errorf("%s | %w", "Prune", err) + } + s.Prune = &value + case bool: + s.Prune = &v + } + + case "pruning_config": + if err := dec.Decode(&s.PruningConfig); err != nil { + return fmt.Errorf("%s | %w", "PruningConfig", err) + } + + case "query": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Query", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Query = &o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "QueryName_", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.QueryName_ = &o + + case "query_vector": + if s.QueryVector == nil { + s.QueryVector = make(map[string]float32, 0) + } + if err := dec.Decode(&s.QueryVector); err != nil { + return fmt.Errorf("%s | %w", "QueryVector", err) + } + + } + } + return nil +} + +// NewSparseVectorQuery returns a SparseVectorQuery. +func NewSparseVectorQuery() *SparseVectorQuery { + r := &SparseVectorQuery{ + QueryVector: make(map[string]float32, 0), + } + + return r +} diff --git a/typedapi/types/splitprocessor.go b/typedapi/types/splitprocessor.go index 71e7e65695..4b369f6cb2 100644 --- a/typedapi/types/splitprocessor.go +++ b/typedapi/types/splitprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SplitProcessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L1093-L1118 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L1097-L1122 type SplitProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -104,7 +104,7 @@ func (s *SplitProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -118,7 +118,7 @@ func (s *SplitProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -137,7 +137,7 @@ func (s *SplitProcessor) UnmarshalJSON(data []byte) error { } case "preserve_trailing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/sql.go b/typedapi/types/sql.go index baf15a8688..be4c131bbe 100644 --- a/typedapi/types/sql.go +++ b/typedapi/types/sql.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Sql type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L386-L389 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L386-L389 type Sql struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -55,7 +55,7 @@ func (s *Sql) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -69,7 +69,7 @@ func (s *Sql) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/ssl.go b/typedapi/types/ssl.go index 5eac9ce816..e96d4c1fc2 100644 --- a/typedapi/types/ssl.go +++ b/typedapi/types/ssl.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Ssl type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L391-L394 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L391-L394 type Ssl struct { Http FeatureToggle `json:"http"` Transport FeatureToggle `json:"transport"` diff --git a/typedapi/types/standardanalyzer.go b/typedapi/types/standardanalyzer.go index 5988c03cc3..173ba6b91d 100644 --- a/typedapi/types/standardanalyzer.go +++ b/typedapi/types/standardanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StandardAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L95-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L95-L99 type StandardAnalyzer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Stopwords []string `json:"stopwords,omitempty"` @@ -55,7 +55,7 @@ func (s *StandardAnalyzer) UnmarshalJSON(data []byte) error { case "max_token_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/standarddeviationbounds.go b/typedapi/types/standarddeviationbounds.go index 86e4ec014d..89bccbf50b 100644 --- a/typedapi/types/standarddeviationbounds.go +++ b/typedapi/types/standarddeviationbounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,14 +30,14 @@ import ( // StandardDeviationBounds type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L260-L267 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L260-L267 type StandardDeviationBounds struct { - Lower Float64 `json:"lower,omitempty"` - LowerPopulation Float64 `json:"lower_population,omitempty"` - LowerSampling Float64 `json:"lower_sampling,omitempty"` - Upper Float64 `json:"upper,omitempty"` - UpperPopulation Float64 `json:"upper_population,omitempty"` - UpperSampling Float64 `json:"upper_sampling,omitempty"` + Lower *Float64 `json:"lower,omitempty"` + LowerPopulation *Float64 `json:"lower_population,omitempty"` + LowerSampling *Float64 `json:"lower_sampling,omitempty"` + Upper *Float64 `json:"upper,omitempty"` + UpperPopulation *Float64 `json:"upper_population,omitempty"` + UpperSampling *Float64 `json:"upper_sampling,omitempty"` } func (s *StandardDeviationBounds) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/standarddeviationboundsasstring.go b/typedapi/types/standarddeviationboundsasstring.go index 41d438eb40..36455bf8af 100644 --- a/typedapi/types/standarddeviationboundsasstring.go +++ b/typedapi/types/standarddeviationboundsasstring.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StandardDeviationBoundsAsString type. 
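// Usage note (illustrative sketch, not part of the generated code): the StandardDeviationBounds
// fields are now optional pointers, so callers reading extended-stats results should nil-check
// each bound before dereferencing it.
//
//	var bounds types.StandardDeviationBounds
//	if bounds.Upper != nil && bounds.Lower != nil {
//		spread := float64(*bounds.Upper) - float64(*bounds.Lower)
//		_ = spread
//	}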
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L269-L276 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L269-L276 type StandardDeviationBoundsAsString struct { Lower string `json:"lower"` LowerPopulation string `json:"lower_population"` diff --git a/typedapi/types/standardretriever.go b/typedapi/types/standardretriever.go new file mode 100644 index 0000000000..0d88497206 --- /dev/null +++ b/typedapi/types/standardretriever.go @@ -0,0 +1,158 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// StandardRetriever type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Retriever.ts#L43-L56 +type StandardRetriever struct { + // Collapse Collapses the top documents by a specified key into a single top document per + // key. + Collapse *FieldCollapse `json:"collapse,omitempty"` + // Filter Query to filter the documents that can match. + Filter []Query `json:"filter,omitempty"` + // MinScore Minimum _score for matching documents. Documents with a lower _score are not + // included in the top documents. + MinScore *float32 `json:"min_score,omitempty"` + // Query Defines a query to retrieve a set of top documents. + Query *Query `json:"query,omitempty"` + // SearchAfter Defines a search after object parameter used for pagination. + SearchAfter []FieldValue `json:"search_after,omitempty"` + // Sort A sort object that that specifies the order of matching documents. + Sort []SortCombinations `json:"sort,omitempty"` + // TerminateAfter Maximum number of documents to collect for each shard. 
+ TerminateAfter *int `json:"terminate_after,omitempty"` +} + +func (s *StandardRetriever) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "collapse": + if err := dec.Decode(&s.Collapse); err != nil { + return fmt.Errorf("%s | %w", "Collapse", err) + } + + case "filter": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := NewQuery() + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Filter", err) + } + + s.Filter = append(s.Filter, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Filter); err != nil { + return fmt.Errorf("%s | %w", "Filter", err) + } + } + + case "min_score": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "MinScore", err) + } + f := float32(value) + s.MinScore = &f + case float64: + f := float32(v) + s.MinScore = &f + } + + case "query": + if err := dec.Decode(&s.Query); err != nil { + return fmt.Errorf("%s | %w", "Query", err) + } + + case "search_after": + if err := dec.Decode(&s.SearchAfter); err != nil { + return fmt.Errorf("%s | %w", "SearchAfter", err) + } + + case "sort": + rawMsg := json.RawMessage{} + dec.Decode(&rawMsg) + if !bytes.HasPrefix(rawMsg, []byte("[")) { + o := new(SortCombinations) + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Sort", err) + } + + s.Sort = append(s.Sort, *o) + } else { + if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.Sort); err != nil { + return fmt.Errorf("%s | %w", "Sort", err) + } + } + + case "terminate_after": + + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.Atoi(v) + if err != nil { + return fmt.Errorf("%s | %w", "TerminateAfter", err) + } + s.TerminateAfter = &value + case float64: + f := int(v) + s.TerminateAfter = &f + } + + } + } + return nil +} + +// NewStandardRetriever returns a StandardRetriever. +func NewStandardRetriever() *StandardRetriever { + r := &StandardRetriever{} + + return r +} diff --git a/typedapi/types/standardtokenizer.go b/typedapi/types/standardtokenizer.go index 8a1eb3c271..19ee4f97c2 100644 --- a/typedapi/types/standardtokenizer.go +++ b/typedapi/types/standardtokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StandardTokenizer type. 
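standardretriever.go is new in this change, so a construction sketch may help. It assumes the same typedapi/types import path and the usual Query/MatchQuery composite shapes; the field and index values are made up, and it only prints the wire form rather than issuing a search:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
    minScore := float32(0.5)
    terminateAfter := 1000

    // Build the retriever with a query, a score floor, and a per-shard cap.
    r := types.NewStandardRetriever()
    r.Query = &types.Query{
        Match: map[string]types.MatchQuery{"title": {Query: "climate"}},
    }
    r.MinScore = &minScore
    r.TerminateAfter = &terminateAfter

    // Print the JSON body that would sit under "retriever": {"standard": ...}.
    body, err := json.MarshalIndent(r, "", "  ")
    if err != nil {
        panic(err)
    }
    fmt.Println(string(body))
}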
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L105-L108 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L105-L108 type StandardTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Type string `json:"type,omitempty"` @@ -55,7 +55,7 @@ func (s *StandardTokenizer) UnmarshalJSON(data []byte) error { case "max_token_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/statistics.go b/typedapi/types/statistics.go index be29074023..8167dc39bc 100644 --- a/typedapi/types/statistics.go +++ b/typedapi/types/statistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Statistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/slm/_types/SnapshotLifecycle.ts#L51-L74 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/slm/_types/SnapshotLifecycle.ts#L51-L74 type Statistics struct { Policy *string `json:"policy,omitempty"` RetentionDeletionTime Duration `json:"retention_deletion_time,omitempty"` @@ -76,7 +76,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { } case "retention_failed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { } case "retention_runs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,7 +106,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { } case "retention_timed_out": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -121,7 +121,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { } case "total_snapshot_deletion_failures", "snapshot_deletion_failures": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -136,7 +136,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { } case "total_snapshots_deleted", "snapshots_deleted": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -151,7 +151,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { } case "total_snapshots_failed", "snapshots_failed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -166,7 +166,7 @@ func (s *Statistics) UnmarshalJSON(data []byte) error { } case "total_snapshots_taken", "snapshots_taken": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/stats.go b/typedapi/types/stats.go index 0c80ba7ca4..4556f3687c 100644 --- a/typedapi/types/stats.go +++ b/typedapi/types/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // Stats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L30-L114 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L30-L114 type Stats struct { // AdaptiveSelection Statistics about adaptive replica selection. AdaptiveSelection map[string]AdaptiveSelection `json:"adaptive_selection,omitempty"` @@ -238,7 +238,7 @@ func (s *Stats) UnmarshalJSON(data []byte) error { } case "timestamp": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/statsaggregate.go b/typedapi/types/statsaggregate.go index 8376d4dca0..4c422a29e9 100644 --- a/typedapi/types/statsaggregate.go +++ b/typedapi/types/statsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,15 +31,15 @@ import ( // StatsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L240-L255 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L240-L255 type StatsAggregate struct { - Avg Float64 `json:"avg,omitempty"` + Avg *Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` Count int64 `json:"count"` - Max Float64 `json:"max,omitempty"` + Max *Float64 `json:"max,omitempty"` MaxAsString *string `json:"max_as_string,omitempty"` Meta Metadata `json:"meta,omitempty"` - Min Float64 `json:"min,omitempty"` + Min *Float64 `json:"min,omitempty"` MinAsString *string `json:"min_as_string,omitempty"` Sum Float64 `json:"sum"` SumAsString *string `json:"sum_as_string,omitempty"` @@ -78,7 +78,7 @@ func (s *StatsAggregate) UnmarshalJSON(data []byte) error { s.AvgAsString = &o case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -132,7 +132,7 @@ func (s *StatsAggregate) UnmarshalJSON(data []byte) error { s.MinAsString = &o case "sum": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/statsaggregation.go b/typedapi/types/statsaggregation.go index c8e11894d6..45d3285a6b 100644 --- a/typedapi/types/statsaggregation.go +++ b/typedapi/types/statsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StatsAggregation type. 
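With Avg, Max, and Min now pointers on StatsAggregate (and StatsBucketAggregate), an aggregation over zero documents can be told apart from one whose metric is genuinely 0. A small decoding sketch under the same import-path assumption, with an illustrative payload:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
    // Stats over zero matching documents: the optional metrics come back null.
    raw := []byte(`{"count": 0, "min": null, "max": null, "avg": null, "sum": 0.0}`)

    var stats types.StatsAggregate
    if err := json.Unmarshal(raw, &stats); err != nil {
        panic(err)
    }

    if stats.Avg == nil {
        fmt.Println("no data to aggregate; avg is nil rather than 0")
    } else {
        fmt.Println("avg:", *stats.Avg)
    }
}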
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L282-L282 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L282-L282 type StatsAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` diff --git a/typedapi/types/statsbucketaggregate.go b/typedapi/types/statsbucketaggregate.go index dbdfdc856d..fd38145471 100644 --- a/typedapi/types/statsbucketaggregate.go +++ b/typedapi/types/statsbucketaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,15 +31,15 @@ import ( // StatsBucketAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L257-L258 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L257-L258 type StatsBucketAggregate struct { - Avg Float64 `json:"avg,omitempty"` + Avg *Float64 `json:"avg,omitempty"` AvgAsString *string `json:"avg_as_string,omitempty"` Count int64 `json:"count"` - Max Float64 `json:"max,omitempty"` + Max *Float64 `json:"max,omitempty"` MaxAsString *string `json:"max_as_string,omitempty"` Meta Metadata `json:"meta,omitempty"` - Min Float64 `json:"min,omitempty"` + Min *Float64 `json:"min,omitempty"` MinAsString *string `json:"min_as_string,omitempty"` Sum Float64 `json:"sum"` SumAsString *string `json:"sum_as_string,omitempty"` @@ -78,7 +78,7 @@ func (s *StatsBucketAggregate) UnmarshalJSON(data []byte) error { s.AvgAsString = &o case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -132,7 +132,7 @@ func (s *StatsBucketAggregate) UnmarshalJSON(data []byte) error { s.MinAsString = &o case "sum": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/statsbucketaggregation.go b/typedapi/types/statsbucketaggregation.go index 29a2991559..90d1dbf174 100644 --- a/typedapi/types/statsbucketaggregation.go +++ b/typedapi/types/statsbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // StatsBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L369-L369 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L369-L369 type StatsBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. 
BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type StatsBucketAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` } func (s *StatsBucketAggregation) UnmarshalJSON(data []byte) error { @@ -84,23 +82,6 @@ func (s *StatsBucketAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/status.go b/typedapi/types/status.go index 0834379a87..8b41867f2a 100644 --- a/typedapi/types/status.go +++ b/typedapi/types/status.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Status type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/snapshot/_types/SnapshotStatus.ts#L26-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/snapshot/_types/SnapshotStatus.ts#L26-L35 type Status struct { IncludeGlobalState bool `json:"include_global_state"` Indices map[string]SnapshotIndexStats `json:"indices"` @@ -59,7 +59,7 @@ func (s *Status) UnmarshalJSON(data []byte) error { switch t { case "include_global_state": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/stemmeroverridetokenfilter.go b/typedapi/types/stemmeroverridetokenfilter.go index 75cf693f2b..aaf458dfd9 100644 --- a/typedapi/types/stemmeroverridetokenfilter.go +++ b/typedapi/types/stemmeroverridetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StemmerOverrideTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L314-L318 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L316-L320 type StemmerOverrideTokenFilter struct { Rules []string `json:"rules,omitempty"` RulesPath *string `json:"rules_path,omitempty"` diff --git a/typedapi/types/stemmertokenfilter.go b/typedapi/types/stemmertokenfilter.go index fa7623aa0b..7ba33d3e1e 100644 --- a/typedapi/types/stemmertokenfilter.go +++ b/typedapi/types/stemmertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StemmerTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L320-L324 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L322-L326 type StemmerTokenFilter struct { Language *string `json:"language,omitempty"` Type string `json:"type,omitempty"` diff --git a/typedapi/types/stepkey.go b/typedapi/types/stepkey.go index 608c387668..b083792823 100644 --- a/typedapi/types/stepkey.go +++ b/typedapi/types/stepkey.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StepKey type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ilm/move_to_step/types.ts#L20-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ilm/move_to_step/types.ts#L20-L24 type StepKey struct { Action string `json:"action"` Name string `json:"name"` diff --git a/typedapi/types/stopanalyzer.go b/typedapi/types/stopanalyzer.go index dcf4be407b..fc510d722a 100644 --- a/typedapi/types/stopanalyzer.go +++ b/typedapi/types/stopanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StopAnalyzer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L101-L106 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L101-L106 type StopAnalyzer struct { Stopwords []string `json:"stopwords,omitempty"` StopwordsPath *string `json:"stopwords_path,omitempty"` diff --git a/typedapi/types/stoptokenfilter.go b/typedapi/types/stoptokenfilter.go index 4964985aa1..c8b55c777f 100644 --- a/typedapi/types/stoptokenfilter.go +++ b/typedapi/types/stoptokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StopTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L97-L103 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L97-L103 type StopTokenFilter struct { IgnoreCase *bool `json:"ignore_case,omitempty"` RemoveTrailing *bool `json:"remove_trailing,omitempty"` @@ -57,7 +57,7 @@ func (s *StopTokenFilter) UnmarshalJSON(data []byte) error { switch t { case "ignore_case": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *StopTokenFilter) UnmarshalJSON(data []byte) error { } case "remove_trailing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/stopwords.go b/typedapi/types/stopwords.go index ac49ea8e6a..9bf3bf30e6 100644 --- a/typedapi/types/stopwords.go +++ b/typedapi/types/stopwords.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // StopWords type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/StopWords.ts#L20-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/StopWords.ts#L20-L26 type StopWords []string diff --git a/typedapi/types/storage.go b/typedapi/types/storage.go index 865b0d263e..d347e8d2af 100644 --- a/typedapi/types/storage.go +++ b/typedapi/types/storage.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // Storage type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L498-L507 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L509-L518 type Storage struct { // AllowMmap You can restrict the use of the mmapfs and the related hybridfs store type // via the setting node.store.allow_mmap. @@ -62,7 +62,7 @@ func (s *Storage) UnmarshalJSON(data []byte) error { switch t { case "allow_mmap": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/storedscript.go b/typedapi/types/storedscript.go index 51fc89c07a..cf71e366f2 100644 --- a/typedapi/types/storedscript.go +++ b/typedapi/types/storedscript.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // StoredScript type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Scripting.ts#L47-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Scripting.ts#L47-L57 type StoredScript struct { // Lang Specifies the language the script is written in. Lang scriptlanguage.ScriptLanguage `json:"lang"` diff --git a/typedapi/types/storedscriptid.go b/typedapi/types/storedscriptid.go index d29fa39418..e15626c8db 100644 --- a/typedapi/types/storedscriptid.go +++ b/typedapi/types/storedscriptid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // StoredScriptId type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Scripting.ts#L81-L86 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Scripting.ts#L81-L86 type StoredScriptId struct { // Id The `id` for a stored script. Id string `json:"id"` diff --git a/typedapi/types/storestats.go b/typedapi/types/storestats.go index d43f2d720c..b4ed64472f 100644 --- a/typedapi/types/storestats.go +++ b/typedapi/types/storestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StoreStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L368-L395 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L368-L395 type StoreStats struct { // Reserved A prediction of how much larger the shard stores will eventually grow due to // ongoing peer recoveries, restoring snapshots, and similar activities. @@ -75,7 +75,7 @@ func (s *StoreStats) UnmarshalJSON(data []byte) error { } case "reserved_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *StoreStats) UnmarshalJSON(data []byte) error { } case "size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -115,7 +115,7 @@ func (s *StoreStats) UnmarshalJSON(data []byte) error { } case "total_data_set_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/stringifiedboolean.go b/typedapi/types/stringifiedboolean.go index a4ab4d636d..f421981a5f 100644 --- a/typedapi/types/stringifiedboolean.go +++ b/typedapi/types/stringifiedboolean.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // bool // string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_spec_utils/Stringified.ts#L20-L27 -type Stringifiedboolean interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_spec_utils/Stringified.ts#L20-L27 +type Stringifiedboolean any diff --git a/typedapi/types/stringifiedepochtimeunitmillis.go b/typedapi/types/stringifiedepochtimeunitmillis.go index 14af2a0376..dc9b3b21da 100644 --- a/typedapi/types/stringifiedepochtimeunitmillis.go +++ b/typedapi/types/stringifiedepochtimeunitmillis.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // int64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_spec_utils/Stringified.ts#L20-L27 -type StringifiedEpochTimeUnitMillis interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_spec_utils/Stringified.ts#L20-L27 +type StringifiedEpochTimeUnitMillis any diff --git a/typedapi/types/stringifiedepochtimeunitseconds.go b/typedapi/types/stringifiedepochtimeunitseconds.go index 6c5ab2b2a8..da43bf5a7e 100644 --- a/typedapi/types/stringifiedepochtimeunitseconds.go +++ b/typedapi/types/stringifiedepochtimeunitseconds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // int64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_spec_utils/Stringified.ts#L20-L27 -type StringifiedEpochTimeUnitSeconds interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_spec_utils/Stringified.ts#L20-L27 +type StringifiedEpochTimeUnitSeconds any diff --git a/typedapi/types/stringifiedinteger.go b/typedapi/types/stringifiedinteger.go index 67053fe86e..fca4b612d6 100644 --- a/typedapi/types/stringifiedinteger.go +++ b/typedapi/types/stringifiedinteger.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // int // string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_spec_utils/Stringified.ts#L20-L27 -type Stringifiedinteger interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_spec_utils/Stringified.ts#L20-L27 +type Stringifiedinteger any diff --git a/typedapi/types/stringifiedversionnumber.go b/typedapi/types/stringifiedversionnumber.go index dd13a12cdc..d13213ad65 100644 --- a/typedapi/types/stringifiedversionnumber.go +++ b/typedapi/types/stringifiedversionnumber.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // int64 // string // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_spec_utils/Stringified.ts#L20-L27 -type StringifiedVersionNumber interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_spec_utils/Stringified.ts#L20-L27 +type StringifiedVersionNumber any diff --git a/typedapi/types/stringraretermsaggregate.go b/typedapi/types/stringraretermsaggregate.go index 59c13b9fa7..7822dc131a 100644 --- a/typedapi/types/stringraretermsaggregate.go +++ b/typedapi/types/stringraretermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // StringRareTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L443-L447 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L445-L449 type StringRareTermsAggregate struct { Buckets BucketsStringRareTermsBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/stringraretermsbucket.go b/typedapi/types/stringraretermsbucket.go index 3cf2358b51..f2cbcce500 100644 --- a/typedapi/types/stringraretermsbucket.go +++ b/typedapi/types/stringraretermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // StringRareTermsBucket type. 
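The Stringified* aliases are unions: the server may send either the native value or its string form, so callers still need a type switch when reading them. A standalone helper sketch; the asBool name is made up and not part of the generated API:

package main

import (
    "fmt"
    "strconv"
)

// asBool normalizes a Stringifiedboolean-style value, which may arrive
// either as a JSON bool or as its string form.
func asBool(v any) (bool, error) {
    switch x := v.(type) {
    case bool:
        return x, nil
    case string:
        return strconv.ParseBool(x)
    default:
        return false, fmt.Errorf("unexpected type %T for stringified boolean", v)
    }
}

func main() {
    for _, v := range []any{true, "false"} {
        b, err := asBool(v)
        fmt.Println(b, err)
    }
}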
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L449-L451 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L451-L453 type StringRareTermsBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *StringRareTermsBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -526,7 +526,7 @@ func (s *StringRareTermsBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -576,7 +576,7 @@ func (s *StringRareTermsBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -586,7 +586,7 @@ func (s *StringRareTermsBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -603,7 +603,7 @@ func (s *StringRareTermsBucket) UnmarshalJSON(data []byte) error { func (s StringRareTermsBucket) MarshalJSON() ([]byte, error) { type opt StringRareTermsBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/stringstatsaggregate.go b/typedapi/types/stringstatsaggregate.go index 629bac4e30..601d52c4d5 100644 --- a/typedapi/types/stringstatsaggregate.go +++ b/typedapi/types/stringstatsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,18 +31,18 @@ import ( // StringStatsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L693-L704 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L700-L711 type StringStatsAggregate struct { - AvgLength Float64 `json:"avg_length,omitempty"` - AvgLengthAsString *string `json:"avg_length_as_string,omitempty"` - Count int64 `json:"count"` - Distribution map[string]Float64 `json:"distribution,omitempty"` - Entropy Float64 `json:"entropy,omitempty"` - MaxLength int `json:"max_length,omitempty"` - MaxLengthAsString *string `json:"max_length_as_string,omitempty"` - Meta Metadata `json:"meta,omitempty"` - MinLength int `json:"min_length,omitempty"` - MinLengthAsString *string `json:"min_length_as_string,omitempty"` + AvgLength *Float64 `json:"avg_length,omitempty"` + AvgLengthAsString *string `json:"avg_length_as_string,omitempty"` + Count int64 `json:"count"` + Distribution *map[string]Float64 `json:"distribution,omitempty"` + Entropy *Float64 `json:"entropy,omitempty"` + MaxLength *int `json:"max_length,omitempty"` + MaxLengthAsString *string `json:"max_length_as_string,omitempty"` + Meta Metadata `json:"meta,omitempty"` + MinLength *int `json:"min_length,omitempty"` + MinLengthAsString *string `json:"min_length_as_string,omitempty"` } func (s *StringStatsAggregate) UnmarshalJSON(data []byte) error { @@ -78,7 +78,7 @@ func (s *StringStatsAggregate) UnmarshalJSON(data []byte) error { s.AvgLengthAsString = &o case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/stringstatsaggregation.go b/typedapi/types/stringstatsaggregation.go index b39df3fcbd..00c74f2dfe 100644 --- a/typedapi/types/stringstatsaggregation.go +++ b/typedapi/types/stringstatsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StringStatsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L284-L290 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L284-L290 type StringStatsAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -105,7 +105,7 @@ func (s *StringStatsAggregation) UnmarshalJSON(data []byte) error { } case "show_distribution": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/stringtermsaggregate.go b/typedapi/types/stringtermsaggregate.go index 225cbbd638..b26e0fd904 100644 --- a/typedapi/types/stringtermsaggregate.go +++ b/typedapi/types/stringtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
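Distribution on StringStatsAggregate is now a pointer to a map, so it needs a nil check before ranging. A short sketch with an illustrative string_stats payload, same import-path assumption:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
    raw := []byte(`{"count":5,"min_length":3,"max_length":9,"avg_length":5.2,"entropy":2.1,"distribution":{"a":0.25,"b":0.75}}`)

    var agg types.StringStatsAggregate
    if err := json.Unmarshal(raw, &agg); err != nil {
        panic(err)
    }

    // The distribution is only present when show_distribution was requested.
    if agg.Distribution != nil {
        for ch, freq := range *agg.Distribution {
            fmt.Println(ch, freq)
        }
    }
}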
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StringTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L384-L389 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L386-L391 type StringTermsAggregate struct { Buckets BucketsStringTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -76,7 +76,7 @@ func (s *StringTermsAggregate) UnmarshalJSON(data []byte) error { } case "doc_count_error_upper_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *StringTermsAggregate) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/stringtermsbucket.go b/typedapi/types/stringtermsbucket.go index 919948c218..14c75e9a84 100644 --- a/typedapi/types/stringtermsbucket.go +++ b/typedapi/types/stringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,12 +32,12 @@ import ( // StringTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L395-L397 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L397-L399 type StringTermsBucket struct { - Aggregations map[string]Aggregate `json:"-"` - DocCount int64 `json:"doc_count"` - DocCountError *int64 `json:"doc_count_error,omitempty"` - Key FieldValue `json:"key"` + Aggregations map[string]Aggregate `json:"-"` + DocCount int64 `json:"doc_count"` + DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` + Key FieldValue `json:"key"` } func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { @@ -56,7 +56,7 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,19 +70,19 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { s.DocCount = f } - case "doc_count_error": - var tmp interface{} + case "doc_count_error_upper_bound": + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: value, err := strconv.ParseInt(v, 10, 64) if err != nil { - return fmt.Errorf("%s | %w", "DocCountError", err) + return fmt.Errorf("%s | %w", "DocCountErrorUpperBound", err) } - s.DocCountError = &value + s.DocCountErrorUpperBound = &value case float64: f := int64(v) - s.DocCountError = &f + s.DocCountErrorUpperBound = &f } case "key": @@ -535,7 +535,7 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -585,7 +585,7 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -595,7 +595,7 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -612,7 +612,7 @@ func (s *StringTermsBucket) UnmarshalJSON(data []byte) error { func (s StringTermsBucket) MarshalJSON() ([]byte, error) { type opt StringTermsBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/stupidbackoffsmoothingmodel.go b/typedapi/types/stupidbackoffsmoothingmodel.go index 725d49bdfe..017e30831b 100644 --- a/typedapi/types/stupidbackoffsmoothingmodel.go +++ b/typedapi/types/stupidbackoffsmoothingmodel.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // StupidBackoffSmoothingModel type. 
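After the rename, the bucket field lines up with the doc_count_error_upper_bound key that the terms aggregation actually returns. A minimal decoding sketch with a made-up bucket, same import-path assumption:

package main

import (
    "encoding/json"
    "fmt"

    "github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
    raw := []byte(`{"key": "golang", "doc_count": 12, "doc_count_error_upper_bound": 3}`)

    var bucket types.StringTermsBucket
    if err := json.Unmarshal(raw, &bucket); err != nil {
        panic(err)
    }

    fmt.Println("key:", bucket.Key, "doc_count:", bucket.DocCount)
    if bucket.DocCountErrorUpperBound != nil {
        fmt.Println("worst-case count error:", *bucket.DocCountErrorUpperBound)
    }
}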
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L460-L465 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L463-L468 type StupidBackoffSmoothingModel struct { // Discount A constant factor that the lower order n-gram model is discounted by. Discount Float64 `json:"discount"` @@ -53,7 +53,7 @@ func (s *StupidBackoffSmoothingModel) UnmarshalJSON(data []byte) error { switch t { case "discount": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/suggest.go b/typedapi/types/suggest.go index 1b5ed4f4b4..b0acbbe88b 100644 --- a/typedapi/types/suggest.go +++ b/typedapi/types/suggest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ package types // PhraseSuggest // TermSuggest // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L34-L40 -type Suggest interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L34-L40 +type Suggest any diff --git a/typedapi/types/suggestcontext.go b/typedapi/types/suggestcontext.go index 227997afd3..9fc6c645ab 100644 --- a/typedapi/types/suggestcontext.go +++ b/typedapi/types/suggestcontext.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SuggestContext type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/specialized.ts#L37-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/specialized.ts#L43-L48 type SuggestContext struct { Name string `json:"name"` Path *string `json:"path,omitempty"` diff --git a/typedapi/types/suggester.go b/typedapi/types/suggester.go index 20161e1f32..07aa3075df 100644 --- a/typedapi/types/suggester.go +++ b/typedapi/types/suggester.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Suggester type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L101-L104 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L101-L107 type Suggester struct { Suggesters map[string]FieldSuggester `json:"-"` // Text Global suggest text, to avoid repetition when the same text is used in @@ -88,7 +88,7 @@ func (s *Suggester) UnmarshalJSON(data []byte) error { func (s Suggester) MarshalJSON() ([]byte, error) { type opt Suggester // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/suggestfuzziness.go b/typedapi/types/suggestfuzziness.go index 3673bee561..9063991fae 100644 --- a/typedapi/types/suggestfuzziness.go +++ b/typedapi/types/suggestfuzziness.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SuggestFuzziness type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L193-L221 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L196-L224 type SuggestFuzziness struct { // Fuzziness The fuzziness factor. Fuzziness Fuzziness `json:"fuzziness,omitempty"` @@ -69,7 +69,7 @@ func (s *SuggestFuzziness) UnmarshalJSON(data []byte) error { case "min_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *SuggestFuzziness) UnmarshalJSON(data []byte) error { case "prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *SuggestFuzziness) UnmarshalJSON(data []byte) error { } case "transpositions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *SuggestFuzziness) UnmarshalJSON(data []byte) error { } case "unicode_aware": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/sumaggregate.go b/typedapi/types/sumaggregate.go index 882cf67b4d..cf368f1eb8 100644 --- a/typedapi/types/sumaggregate.go +++ b/typedapi/types/sumaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // SumAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L203-L207 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L203-L207 type SumAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. - Value Float64 `json:"value,omitempty"` - ValueAsString *string `json:"value_as_string,omitempty"` + Value *Float64 `json:"value,omitempty"` + ValueAsString *string `json:"value_as_string,omitempty"` } func (s *SumAggregate) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/sumaggregation.go b/typedapi/types/sumaggregation.go index 84fdc86da5..d1e32ecd1d 100644 --- a/typedapi/types/sumaggregation.go +++ b/typedapi/types/sumaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SumAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L292-L292 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L292-L292 type SumAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` diff --git a/typedapi/types/sumbucketaggregation.go b/typedapi/types/sumbucketaggregation.go index 03022fcf4f..e2b528ebd0 100644 --- a/typedapi/types/sumbucketaggregation.go +++ b/typedapi/types/sumbucketaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // SumBucketAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/pipeline.ts#L371-L371 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/pipeline.ts#L371-L371 type SumBucketAggregation struct { // BucketsPath Path to the buckets that contain one set of values to correlate. BucketsPath BucketsPath `json:"buckets_path,omitempty"` @@ -43,8 +43,6 @@ type SumBucketAggregation struct { Format *string `json:"format,omitempty"` // GapPolicy Policy to apply when gaps are found in the data. 
GapPolicy *gappolicy.GapPolicy `json:"gap_policy,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` } func (s *SumBucketAggregation) UnmarshalJSON(data []byte) error { @@ -84,23 +82,6 @@ func (s *SumBucketAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "GapPolicy", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - } } return nil diff --git a/typedapi/types/summary.go b/typedapi/types/summary.go index afa898ae07..fa96628322 100644 --- a/typedapi/types/summary.go +++ b/typedapi/types/summary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // Summary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/enrich/_types/Policy.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/enrich/_types/Policy.ts#L24-L26 type Summary struct { Config map[policytype.PolicyType]EnrichPolicy `json:"config"` } diff --git a/typedapi/types/synccontainer.go b/typedapi/types/synccontainer.go index 80a903b013..ca6f82e942 100644 --- a/typedapi/types/synccontainer.go +++ b/typedapi/types/synccontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // SyncContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/_types/Transform.ts#L169-L175 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/_types/Transform.ts#L169-L175 type SyncContainer struct { // Time Specifies that the transform uses a time field to synchronize the source and // destination indices. diff --git a/typedapi/types/synonymgraphtokenfilter.go b/typedapi/types/synonymgraphtokenfilter.go index 4bd439b1e2..d9c45116a5 100644 --- a/typedapi/types/synonymgraphtokenfilter.go +++ b/typedapi/types/synonymgraphtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,13 +33,14 @@ import ( // SynonymGraphTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L110-L119 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L110-L120 type SynonymGraphTokenFilter struct { Expand *bool `json:"expand,omitempty"` Format *synonymformat.SynonymFormat `json:"format,omitempty"` Lenient *bool `json:"lenient,omitempty"` Synonyms []string `json:"synonyms,omitempty"` SynonymsPath *string `json:"synonyms_path,omitempty"` + SynonymsSet *string `json:"synonyms_set,omitempty"` Tokenizer *string `json:"tokenizer,omitempty"` Type string `json:"type,omitempty"` Updateable *bool `json:"updateable,omitempty"` @@ -62,7 +63,7 @@ func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { switch t { case "expand": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -81,7 +82,7 @@ func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { } case "lenient": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,6 +112,18 @@ func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { } s.SynonymsPath = &o + case "synonyms_set": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "SynonymsSet", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.SynonymsSet = &o + case "tokenizer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { @@ -129,7 +142,7 @@ func (s *SynonymGraphTokenFilter) UnmarshalJSON(data []byte) error { } case "updateable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -161,6 +174,7 @@ func (s SynonymGraphTokenFilter) MarshalJSON() ([]byte, error) { Lenient: s.Lenient, Synonyms: s.Synonyms, SynonymsPath: s.SynonymsPath, + SynonymsSet: s.SynonymsSet, Tokenizer: s.Tokenizer, Type: s.Type, Updateable: s.Updateable, diff --git a/typedapi/types/synonymrule.go b/typedapi/types/synonymrule.go index 73622f2d05..01f7897695 100644 --- a/typedapi/types/synonymrule.go +++ b/typedapi/types/synonymrule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SynonymRule type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/_types/SynonymRule.ts#L26-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/_types/SynonymRule.ts#L26-L35 type SynonymRule struct { // Id Synonym Rule identifier Id *string `json:"id,omitempty"` diff --git a/typedapi/types/synonymruleread.go b/typedapi/types/synonymruleread.go index 8ec2fb61a9..dbebc4379a 100644 --- a/typedapi/types/synonymruleread.go +++ b/typedapi/types/synonymruleread.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // SynonymRuleRead type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/_types/SynonymRule.ts#L38-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/_types/SynonymRule.ts#L38-L47 type SynonymRuleRead struct { // Id Synonym Rule identifier Id string `json:"id"` diff --git a/typedapi/types/synonymssetitem.go b/typedapi/types/synonymssetitem.go index dbc4a27b15..afa0e59167 100644 --- a/typedapi/types/synonymssetitem.go +++ b/typedapi/types/synonymssetitem.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // SynonymsSetItem type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/get_synonyms_sets/SynonymsSetsGetResponse.ts#L30-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/get_synonyms_sets/SynonymsSetsGetResponse.ts#L30-L39 type SynonymsSetItem struct { // Count Number of synonym rules that the synonym set contains Count int `json:"count"` @@ -56,7 +56,7 @@ func (s *SynonymsSetItem) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/synonymsupdateresult.go b/typedapi/types/synonymsupdateresult.go index 98d2a93484..5e4018c3ae 100644 --- a/typedapi/types/synonymsupdateresult.go +++ b/typedapi/types/synonymsupdateresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // SynonymsUpdateResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/synonyms/_types/SynonymsUpdateResult.ts#L23-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/synonyms/_types/SynonymsUpdateResult.ts#L23-L34 type SynonymsUpdateResult struct { // ReloadAnalyzersDetails Updating synonyms in a synonym set reloads the associated analyzers. // This is the analyzers reloading result diff --git a/typedapi/types/synonymtokenfilter.go b/typedapi/types/synonymtokenfilter.go index d641a3b40c..6b2dc6ccaa 100644 --- a/typedapi/types/synonymtokenfilter.go +++ b/typedapi/types/synonymtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,13 +33,14 @@ import ( // SynonymTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L121-L130 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L122-L132 type SynonymTokenFilter struct { Expand *bool `json:"expand,omitempty"` Format *synonymformat.SynonymFormat `json:"format,omitempty"` Lenient *bool `json:"lenient,omitempty"` Synonyms []string `json:"synonyms,omitempty"` SynonymsPath *string `json:"synonyms_path,omitempty"` + SynonymsSet *string `json:"synonyms_set,omitempty"` Tokenizer *string `json:"tokenizer,omitempty"` Type string `json:"type,omitempty"` Updateable *bool `json:"updateable,omitempty"` @@ -62,7 +63,7 @@ func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { switch t { case "expand": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -81,7 +82,7 @@ func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { } case "lenient": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,6 +112,18 @@ func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { } s.SynonymsPath = &o + case "synonyms_set": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "SynonymsSet", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.SynonymsSet = &o + case "tokenizer": var tmp json.RawMessage if err := dec.Decode(&tmp); err != nil { @@ -129,7 +142,7 @@ func (s *SynonymTokenFilter) UnmarshalJSON(data []byte) error { } case "updateable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -161,6 +174,7 @@ func (s SynonymTokenFilter) MarshalJSON() ([]byte, error) { Lenient: s.Lenient, Synonyms: s.Synonyms, SynonymsPath: s.SynonymsPath, + SynonymsSet: s.SynonymsSet, Tokenizer: s.Tokenizer, Type: s.Type, Updateable: s.Updateable, diff --git a/typedapi/types/targetmeanencodingpreprocessor.go b/typedapi/types/targetmeanencodingpreprocessor.go index 4db92af02b..926ab58bbb 100644 --- a/typedapi/types/targetmeanencodingpreprocessor.go +++ b/typedapi/types/targetmeanencodingpreprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TargetMeanEncodingPreprocessor type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/types.ts#L49-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/types.ts#L49-L54 type TargetMeanEncodingPreprocessor struct { DefaultValue Float64 `json:"default_value"` FeatureName string `json:"feature_name"` @@ -55,7 +55,7 @@ func (s *TargetMeanEncodingPreprocessor) UnmarshalJSON(data []byte) error { switch t { case "default_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/taskfailure.go b/typedapi/types/taskfailure.go index 74bcba2545..8974da5277 100644 --- a/typedapi/types/taskfailure.go +++ b/typedapi/types/taskfailure.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TaskFailure type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Errors.ts#L66-L71 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Errors.ts#L68-L73 type TaskFailure struct { NodeId string `json:"node_id"` Reason ErrorCause `json:"reason"` @@ -77,7 +77,7 @@ func (s *TaskFailure) UnmarshalJSON(data []byte) error { s.Status = o case "task_id": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/taskid.go b/typedapi/types/taskid.go index 2e41e18c85..9cb968abef 100644 --- a/typedapi/types/taskid.go +++ b/typedapi/types/taskid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // int // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L132-L132 -type TaskId interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L132-L132 +type TaskId any diff --git a/typedapi/types/taskinfo.go b/typedapi/types/taskinfo.go index 2486a92898..dbefcc297b 100644 --- a/typedapi/types/taskinfo.go +++ b/typedapi/types/taskinfo.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TaskInfo type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/tasks/_types/TaskInfo.ts#L32-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/tasks/_types/TaskInfo.ts#L32-L47 type TaskInfo struct { Action string `json:"action"` Cancellable bool `json:"cancellable"` @@ -77,7 +77,7 @@ func (s *TaskInfo) UnmarshalJSON(data []byte) error { s.Action = o case "cancellable": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -91,7 +91,7 @@ func (s *TaskInfo) UnmarshalJSON(data []byte) error { } case "cancelled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *TaskInfo) UnmarshalJSON(data []byte) error { } case "id": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/taskinfos.go b/typedapi/types/taskinfos.go index 1c4b5f8639..d27c8b5381 100644 --- a/typedapi/types/taskinfos.go +++ b/typedapi/types/taskinfos.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // []TaskInfo // map[string]ParentTaskInfo // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/tasks/_types/TaskListResponseBase.ts#L40-L43 -type TaskInfos interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/tasks/_types/TaskListResponseBase.ts#L40-L43 +type TaskInfos any diff --git a/typedapi/types/tasksrecord.go b/typedapi/types/tasksrecord.go index 9a8a464c8f..4156418037 100644 --- a/typedapi/types/tasksrecord.go +++ b/typedapi/types/tasksrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TasksRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/tasks/types.ts#L22-L101 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/tasks/types.ts#L22-L101 type TasksRecord struct { // Action The task action. Action *string `json:"action,omitempty"` diff --git a/typedapi/types/tdigest.go b/typedapi/types/tdigest.go index cd8619744b..c24adc19c9 100644 --- a/typedapi/types/tdigest.go +++ b/typedapi/types/tdigest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TDigest type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L223-L228 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L223-L228 type TDigest struct { // Compression Limits the maximum number of nodes used by the underlying TDigest algorithm // to `20 * compression`, enabling control of memory usage and approximation @@ -56,7 +56,7 @@ func (s *TDigest) UnmarshalJSON(data []byte) error { case "compression": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/tdigestpercentileranksaggregate.go b/typedapi/types/tdigestpercentileranksaggregate.go index f189923960..3e1bcc036c 100644 --- a/typedapi/types/tdigestpercentileranksaggregate.go +++ b/typedapi/types/tdigestpercentileranksaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TDigestPercentileRanksAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L175-L176 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L175-L176 type TDigestPercentileRanksAggregate struct { Meta Metadata `json:"meta,omitempty"` Values Percentiles `json:"values"` diff --git a/typedapi/types/tdigestpercentilesaggregate.go b/typedapi/types/tdigestpercentilesaggregate.go index c0ed744c31..0aac079965 100644 --- a/typedapi/types/tdigestpercentilesaggregate.go +++ b/typedapi/types/tdigestpercentilesaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TDigestPercentilesAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L172-L173 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L172-L173 type TDigestPercentilesAggregate struct { Meta Metadata `json:"meta,omitempty"` Values Percentiles `json:"values"` diff --git a/typedapi/types/template.go b/typedapi/types/template.go index eeba0ae8bc..de4efb4dfa 100644 --- a/typedapi/types/template.go +++ b/typedapi/types/template.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // Template type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L33-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/simulate_template/IndicesSimulateTemplateResponse.ts#L33-L37 type Template struct { Aliases map[string]Alias `json:"aliases"` Mappings TypeMapping `json:"mappings"` diff --git a/typedapi/types/templateconfig.go b/typedapi/types/templateconfig.go index 20bec67bbd..a25696e913 100644 --- a/typedapi/types/templateconfig.go +++ b/typedapi/types/templateconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TemplateConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/msearch_template/types.ts#L28-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/msearch_template/types.ts#L28-L54 type TemplateConfig struct { // Explain If `true`, returns detailed information about score calculation as part of // each hit. @@ -67,7 +67,7 @@ func (s *TemplateConfig) UnmarshalJSON(data []byte) error { switch t { case "explain": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -94,7 +94,7 @@ func (s *TemplateConfig) UnmarshalJSON(data []byte) error { } case "profile": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/templatemapping.go b/typedapi/types/templatemapping.go index 58c9e1886c..7cd5d1210b 100644 --- a/typedapi/types/templatemapping.go +++ b/typedapi/types/templatemapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TemplateMapping type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/TemplateMapping.ts#L27-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/TemplateMapping.ts#L27-L34 type TemplateMapping struct { Aliases map[string]Alias `json:"aliases"` IndexPatterns []string `json:"index_patterns"` @@ -76,7 +76,7 @@ func (s *TemplateMapping) UnmarshalJSON(data []byte) error { case "order": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/templatesrecord.go b/typedapi/types/templatesrecord.go index 31b9e0d5f6..b8d8280d31 100644 --- a/typedapi/types/templatesrecord.go +++ b/typedapi/types/templatesrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TemplatesRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/templates/types.ts#L22-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/templates/types.ts#L22-L48 type TemplatesRecord struct { // ComposedOf The component templates that comprise the index template. ComposedOf *string `json:"composed_of,omitempty"` @@ -42,7 +42,7 @@ type TemplatesRecord struct { // Order The template application order or priority number. Order *string `json:"order,omitempty"` // Version The template version. - Version string `json:"version,omitempty"` + Version *string `json:"version,omitempty"` } func (s *TemplatesRecord) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/term.go b/typedapi/types/term.go index 1e36b00ec0..ab22c0dbcd 100644 --- a/typedapi/types/term.go +++ b/typedapi/types/term.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Term type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/termvectors/types.ts#L34-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/termvectors/types.ts#L34-L40 type Term struct { DocFreq *int `json:"doc_freq,omitempty"` Score *Float64 `json:"score,omitempty"` @@ -57,7 +57,7 @@ func (s *Term) UnmarshalJSON(data []byte) error { case "doc_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *Term) UnmarshalJSON(data []byte) error { } case "score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *Term) UnmarshalJSON(data []byte) error { case "term_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +110,7 @@ func (s *Term) UnmarshalJSON(data []byte) error { case "ttf": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termquery.go b/typedapi/types/termquery.go index b612dc7b0a..7df086af25 100644 --- a/typedapi/types/termquery.go +++ b/typedapi/types/termquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L217-L231 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L219-L233 type TermQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -70,7 +70,7 @@ func (s *TermQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -86,7 +86,7 @@ func (s *TermQuery) UnmarshalJSON(data []byte) error { } case "case_insensitive": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termrangequery.go b/typedapi/types/termrangequery.go new file mode 100644 index 0000000000..10c593f629 --- /dev/null +++ b/typedapi/types/termrangequery.go @@ -0,0 +1,189 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/rangerelation" +) + +// TermRangeQuery type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L159-L159 +type TermRangeQuery struct { + // Boost Floating point number used to decrease or increase the relevance scores of + // the query. + // Boost values are relative to the default value of 1.0. + // A boost value between 0 and 1.0 decreases the relevance score. + // A value greater than 1.0 increases the relevance score. + Boost *float32 `json:"boost,omitempty"` + From *string `json:"from,omitempty"` + // Gt Greater than. + Gt *string `json:"gt,omitempty"` + // Gte Greater than or equal to. + Gte *string `json:"gte,omitempty"` + // Lt Less than. + Lt *string `json:"lt,omitempty"` + // Lte Less than or equal to. + Lte *string `json:"lte,omitempty"` + QueryName_ *string `json:"_name,omitempty"` + // Relation Indicates how the range query matches values for `range` fields. 
+ Relation *rangerelation.RangeRelation `json:"relation,omitempty"` + To *string `json:"to,omitempty"` +} + +func (s *TermRangeQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "Boost", err) + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "from": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "From", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.From = &o + + case "gt": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Gt", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Gt = &o + + case "gte": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Gte", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Gte = &o + + case "lt": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Lt", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Lt = &o + + case "lte": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "Lte", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.Lte = &o + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "QueryName_", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.QueryName_ = &o + + case "relation": + if err := dec.Decode(&s.Relation); err != nil { + return fmt.Errorf("%s | %w", "Relation", err) + } + + case "to": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "To", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.To = &o + + } + } + return nil +} + +// NewTermRangeQuery returns a TermRangeQuery. +func NewTermRangeQuery() *TermRangeQuery { + r := &TermRangeQuery{} + + return r +} diff --git a/typedapi/types/termsaggregatebasedoubletermsbucket.go b/typedapi/types/termsaggregatebasedoubletermsbucket.go index d84d972a01..50b2fe73d0 100644 --- a/typedapi/types/termsaggregatebasedoubletermsbucket.go +++ b/typedapi/types/termsaggregatebasedoubletermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermsAggregateBaseDoubleTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L377-L382 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L379-L384 type TermsAggregateBaseDoubleTermsBucket struct { Buckets BucketsDoubleTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -76,7 +76,7 @@ func (s *TermsAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) error { } case "doc_count_error_upper_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *TermsAggregateBaseDoubleTermsBucket) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termsaggregatebaselongtermsbucket.go b/typedapi/types/termsaggregatebaselongtermsbucket.go index 40b14f658b..42975833a6 100644 --- a/typedapi/types/termsaggregatebaselongtermsbucket.go +++ b/typedapi/types/termsaggregatebaselongtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermsAggregateBaseLongTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L377-L382 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L379-L384 type TermsAggregateBaseLongTermsBucket struct { Buckets BucketsLongTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -76,7 +76,7 @@ func (s *TermsAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) error { } case "doc_count_error_upper_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *TermsAggregateBaseLongTermsBucket) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termsaggregatebasemultitermsbucket.go b/typedapi/types/termsaggregatebasemultitermsbucket.go index 352b42fa86..e740f16086 100644 --- a/typedapi/types/termsaggregatebasemultitermsbucket.go +++ b/typedapi/types/termsaggregatebasemultitermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermsAggregateBaseMultiTermsBucket type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L377-L382 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L379-L384 type TermsAggregateBaseMultiTermsBucket struct { Buckets BucketsMultiTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -76,7 +76,7 @@ func (s *TermsAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) error { } case "doc_count_error_upper_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *TermsAggregateBaseMultiTermsBucket) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termsaggregatebasestringtermsbucket.go b/typedapi/types/termsaggregatebasestringtermsbucket.go index 520fd782a4..1c1aa0787a 100644 --- a/typedapi/types/termsaggregatebasestringtermsbucket.go +++ b/typedapi/types/termsaggregatebasestringtermsbucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermsAggregateBaseStringTermsBucket type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L377-L382 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L379-L384 type TermsAggregateBaseStringTermsBucket struct { Buckets BucketsStringTermsBucket `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -76,7 +76,7 @@ func (s *TermsAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) error { } case "doc_count_error_upper_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *TermsAggregateBaseStringTermsBucket) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termsaggregatebasevoid.go b/typedapi/types/termsaggregatebasevoid.go index 8397a903f6..ca423e0ce6 100644 --- a/typedapi/types/termsaggregatebasevoid.go +++ b/typedapi/types/termsaggregatebasevoid.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermsAggregateBaseVoid type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L377-L382 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L379-L384 type TermsAggregateBaseVoid struct { Buckets BucketsVoid `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -62,13 +62,13 @@ func (s *TermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { localDec := json.NewDecoder(source) switch rawMsg[0] { case '{': - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': - o := []interface{}{} + o := []any{} if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Buckets", err) } @@ -76,7 +76,7 @@ func (s *TermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { } case "doc_count_error_upper_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *TermsAggregateBaseVoid) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termsaggregation.go b/typedapi/types/termsaggregation.go index 33d3a9eb7c..f5fef20097 100644 --- a/typedapi/types/termsaggregation.go +++ b/typedapi/types/termsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -36,7 +36,7 @@ import ( // TermsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L910-L970 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L912-L977 type TermsAggregation struct { // CollectMode Determines how child aggregations should be calculated: breadth-first or // depth-first. @@ -53,7 +53,6 @@ type TermsAggregation struct { // Include Values to include. // Accepts regular expressions and partitions. Include TermsInclude `json:"include,omitempty"` - Meta Metadata `json:"meta,omitempty"` // MinDocCount Only return values that are found in more than `min_doc_count` hits. MinDocCount *int `json:"min_doc_count,omitempty"` // Missing The value to apply to documents that do not have a value. @@ -61,11 +60,15 @@ type TermsAggregation struct { Missing Missing `json:"missing,omitempty"` MissingBucket *bool `json:"missing_bucket,omitempty"` MissingOrder *missingorder.MissingOrder `json:"missing_order,omitempty"` - Name *string `json:"name,omitempty"` // Order Specifies the sort order of the buckets. // Defaults to sorting by descending document count. Order AggregateOrder `json:"order,omitempty"` Script Script `json:"script,omitempty"` + // ShardMinDocCount Regulates the certainty a shard has if the term should actually be added to + // the candidate list or not with respect to the `min_doc_count`. 
+ // Terms will only be considered if their local shard frequency within the set + // is higher than the `shard_min_doc_count`. + ShardMinDocCount *int64 `json:"shard_min_doc_count,omitempty"` // ShardSize The number of candidate terms produced by each shard. // By default, `shard_size` will be automatically estimated based on the number // of shards and the `size` parameter. @@ -142,14 +145,9 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Include", err) } - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - case "min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -169,7 +167,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { } case "missing_bucket": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -187,18 +185,6 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "MissingOrder", err) } - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "order": rawMsg := json.RawMessage{} @@ -256,9 +242,24 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { } } + case "shard_min_doc_count": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseInt(v, 10, 64) + if err != nil { + return fmt.Errorf("%s | %w", "ShardMinDocCount", err) + } + s.ShardMinDocCount = &value + case float64: + f := int64(v) + s.ShardMinDocCount = &f + } + case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -273,7 +274,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { } case "show_term_doc_count_error": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -288,7 +289,7 @@ func (s *TermsAggregation) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termsexclude.go b/typedapi/types/termsexclude.go index a040b4d40e..4ca50d0f67 100644 --- a/typedapi/types/termsexclude.go +++ b/typedapi/types/termsexclude.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TermsExclude type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L1001-L1002 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L1008-L1009 type TermsExclude []string diff --git a/typedapi/types/termsgrouping.go b/typedapi/types/termsgrouping.go index d259936b2b..6ae3b4c790 100644 --- a/typedapi/types/termsgrouping.go +++ b/typedapi/types/termsgrouping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TermsGrouping type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/rollup/_types/Groupings.ts#L75-L82 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/rollup/_types/Groupings.ts#L75-L82 type TermsGrouping struct { // Fields The set of fields that you wish to collect terms for. // This array can contain fields that are both keyword and numerics. diff --git a/typedapi/types/termsinclude.go b/typedapi/types/termsinclude.go index 471c606237..71eb007b7f 100644 --- a/typedapi/types/termsinclude.go +++ b/typedapi/types/termsinclude.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,5 +26,5 @@ package types // []string // TermsPartition // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L998-L999 -type TermsInclude interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L1005-L1006 +type TermsInclude any diff --git a/typedapi/types/termslookup.go b/typedapi/types/termslookup.go index f2c48f22e4..7008dfec48 100644 --- a/typedapi/types/termslookup.go +++ b/typedapi/types/termslookup.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TermsLookup type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L242-L247 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L247-L252 type TermsLookup struct { Id string `json:"id"` Index string `json:"index"` diff --git a/typedapi/types/termspartition.go b/typedapi/types/termspartition.go index a99dafed13..a447a1839b 100644 --- a/typedapi/types/termspartition.go +++ b/typedapi/types/termspartition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermsPartition type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L1004-L1013 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L1011-L1020 type TermsPartition struct { // NumPartitions The number of partitions. NumPartitions int64 `json:"num_partitions"` @@ -55,7 +55,7 @@ func (s *TermsPartition) UnmarshalJSON(data []byte) error { switch t { case "num_partitions": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *TermsPartition) UnmarshalJSON(data []byte) error { } case "partition": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termsquery.go b/typedapi/types/termsquery.go index 9e9f934243..d04db22f54 100644 --- a/typedapi/types/termsquery.go +++ b/typedapi/types/termsquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermsQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L233-L235 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L235-L240 type TermsQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -59,7 +59,7 @@ func (s *TermsQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +108,7 @@ func (s *TermsQuery) UnmarshalJSON(data []byte) error { func (s TermsQuery) MarshalJSON() ([]byte, error) { type opt TermsQuery // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/termsqueryfield.go b/typedapi/types/termsqueryfield.go index 55a9628714..0810bfd0e6 100644 --- a/typedapi/types/termsqueryfield.go +++ b/typedapi/types/termsqueryfield.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // []FieldValue // TermsLookup // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L237-L240 -type TermsQueryField interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L242-L245 +type TermsQueryField any diff --git a/typedapi/types/termssetquery.go b/typedapi/types/termssetquery.go index a5a30465e9..1ef260dc6a 100644 --- a/typedapi/types/termssetquery.go +++ b/typedapi/types/termssetquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermsSetQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L249-L262 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L254-L267 type TermsSetQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -66,7 +66,7 @@ func (s *TermsSetQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termsuggest.go b/typedapi/types/termsuggest.go index 5b9256ff71..21ddd0ef27 100644 --- a/typedapi/types/termsuggest.go +++ b/typedapi/types/termsuggest.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermSuggest type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L64-L69 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L64-L69 type TermSuggest struct { Length int `json:"length"` Offset int `json:"offset"` @@ -56,7 +56,7 @@ func (s *TermSuggest) UnmarshalJSON(data []byte) error { case "length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *TermSuggest) UnmarshalJSON(data []byte) error { case "offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termsuggester.go b/typedapi/types/termsuggester.go index 2a5f814e83..4578a8e961 100644 --- a/typedapi/types/termsuggester.go +++ b/typedapi/types/termsuggester.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // TermSuggester type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L503-L565 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L506-L568 type TermSuggester struct { // Analyzer The analyzer to analyze the suggest text with. // Defaults to the search analyzer of the suggest field. @@ -121,7 +121,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { } case "lowercase_terms": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -136,7 +136,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case "max_edits": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -152,7 +152,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case "max_inspections": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -167,7 +167,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { } case "max_term_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -183,7 +183,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { } case "min_doc_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -200,7 +200,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case "min_word_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -216,7 +216,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case "prefix_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -232,7 +232,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -248,7 +248,7 @@ func (s *TermSuggester) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termsuggestoption.go b/typedapi/types/termsuggestoption.go index 21a2ebfced..1aa2d60664 100644 --- a/typedapi/types/termsuggestoption.go +++ b/typedapi/types/termsuggestoption.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermSuggestOption type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/suggester.ts#L93-L99 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/suggester.ts#L93-L99 type TermSuggestOption struct { CollateMatch *bool `json:"collate_match,omitempty"` Freq int64 `json:"freq"` @@ -56,7 +56,7 @@ func (s *TermSuggestOption) UnmarshalJSON(data []byte) error { switch t { case "collate_match": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *TermSuggestOption) UnmarshalJSON(data []byte) error { } case "freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -97,7 +97,7 @@ func (s *TermSuggestOption) UnmarshalJSON(data []byte) error { s.Highlighted = &o case "score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termvector.go b/typedapi/types/termvector.go index 2f04ed5dbf..3fff5d5ad8 100644 --- a/typedapi/types/termvector.go +++ b/typedapi/types/termvector.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TermVector type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/termvectors/types.ts#L23-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/termvectors/types.ts#L23-L26 type TermVector struct { FieldStatistics FieldStatistics `json:"field_statistics"` Terms map[string]Term `json:"terms"` diff --git a/typedapi/types/termvectorsfilter.go b/typedapi/types/termvectorsfilter.go index 875a6f7f8a..2b12e25c4a 100644 --- a/typedapi/types/termvectorsfilter.go +++ b/typedapi/types/termvectorsfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermVectorsFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/termvectors/types.ts#L49-L86 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/termvectors/types.ts#L49-L86 type TermVectorsFilter struct { // MaxDocFreq Ignore words which occur in more than this many docs. // Defaults to unbounded. 
@@ -69,7 +69,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case "max_doc_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case "max_num_terms": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -101,7 +101,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case "max_term_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +117,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case "max_word_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -133,7 +133,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case "min_doc_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -149,7 +149,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case "min_term_freq": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -165,7 +165,7 @@ func (s *TermVectorsFilter) UnmarshalJSON(data []byte) error { case "min_word_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termvectorsresult.go b/typedapi/types/termvectorsresult.go index c371281c21..e96cfdb6a3 100644 --- a/typedapi/types/termvectorsresult.go +++ b/typedapi/types/termvectorsresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermVectorsResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/mtermvectors/types.ts#L96-L104 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/mtermvectors/types.ts#L96-L104 type TermVectorsResult struct { Error *ErrorCause `json:"error,omitempty"` Found *bool `json:"found,omitempty"` @@ -63,7 +63,7 @@ func (s *TermVectorsResult) UnmarshalJSON(data []byte) error { } case "found": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *TermVectorsResult) UnmarshalJSON(data []byte) error { } case "took": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/termvectorstoken.go b/typedapi/types/termvectorstoken.go index 84aa7a4096..dc570d1113 100644 --- a/typedapi/types/termvectorstoken.go +++ b/typedapi/types/termvectorstoken.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TermVectorsToken type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/termvectors/types.ts#L42-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/termvectors/types.ts#L42-L47 type TermVectorsToken struct { EndOffset *int `json:"end_offset,omitempty"` Payload *string `json:"payload,omitempty"` @@ -56,7 +56,7 @@ func (s *TermVectorsToken) UnmarshalJSON(data []byte) error { case "end_offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -84,7 +84,7 @@ func (s *TermVectorsToken) UnmarshalJSON(data []byte) error { case "position": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *TermVectorsToken) UnmarshalJSON(data []byte) error { case "start_offset": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/testpopulation.go b/typedapi/types/testpopulation.go index 0d3911af99..363be86742 100644 --- a/typedapi/types/testpopulation.go +++ b/typedapi/types/testpopulation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TestPopulation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L310-L320 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L310-L320 type TestPopulation struct { // Field The field to aggregate. Field string `json:"field"` diff --git a/typedapi/types/textclassificationinferenceoptions.go b/typedapi/types/textclassificationinferenceoptions.go index d8d25429a2..15ab471015 100644 --- a/typedapi/types/textclassificationinferenceoptions.go +++ b/typedapi/types/textclassificationinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TextClassificationInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L189-L199 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L189-L199 type TextClassificationInferenceOptions struct { // ClassificationLabels Classification labels to apply other than the stored labels. 
Must have the // same deminsions as the default configured labels @@ -67,7 +67,7 @@ func (s *TextClassificationInferenceOptions) UnmarshalJSON(data []byte) error { case "num_top_classes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/textclassificationinferenceupdateoptions.go b/typedapi/types/textclassificationinferenceupdateoptions.go index 97efe6808e..21ebddd15b 100644 --- a/typedapi/types/textclassificationinferenceupdateoptions.go +++ b/typedapi/types/textclassificationinferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TextClassificationInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L363-L372 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L363-L372 type TextClassificationInferenceUpdateOptions struct { // ClassificationLabels Classification labels to apply other than the stored labels. Must have the // same deminsions as the default configured labels @@ -67,7 +67,7 @@ func (s *TextClassificationInferenceUpdateOptions) UnmarshalJSON(data []byte) er case "num_top_classes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/textembedding.go b/typedapi/types/textembedding.go index 3aac71a463..5bbd6fa87c 100644 --- a/typedapi/types/textembedding.go +++ b/typedapi/types/textembedding.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TextEmbedding type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Knn.ts#L56-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Knn.ts#L74-L77 type TextEmbedding struct { ModelId string `json:"model_id"` ModelText string `json:"model_text"` diff --git a/typedapi/types/textembeddingbyteresult.go b/typedapi/types/textembeddingbyteresult.go index 9ef4b2fe35..f646a1d537 100644 --- a/typedapi/types/textembeddingbyteresult.go +++ b/typedapi/types/textembeddingbyteresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TextEmbeddingByteResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/_types/Results.ts#L45-L50 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/_types/Results.ts#L46-L51 type TextEmbeddingByteResult struct { Embedding []byte `json:"embedding"` } diff --git a/typedapi/types/textembeddinginferenceoptions.go b/typedapi/types/textembeddinginferenceoptions.go index f5a01191d3..1636378b51 100644 --- a/typedapi/types/textembeddinginferenceoptions.go +++ b/typedapi/types/textembeddinginferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TextEmbeddingInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L237-L245 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L237-L245 type TextEmbeddingInferenceOptions struct { // EmbeddingSize The number of dimensions in the embedding output EmbeddingSize *int `json:"embedding_size,omitempty"` @@ -59,7 +59,7 @@ func (s *TextEmbeddingInferenceOptions) UnmarshalJSON(data []byte) error { case "embedding_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/textembeddinginferenceupdateoptions.go b/typedapi/types/textembeddinginferenceupdateoptions.go index 6016cd7416..530d79dd17 100644 --- a/typedapi/types/textembeddinginferenceupdateoptions.go +++ b/typedapi/types/textembeddinginferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TextEmbeddingInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L392-L396 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L392-L396 type TextEmbeddingInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. diff --git a/typedapi/types/textembeddingresult.go b/typedapi/types/textembeddingresult.go index d2e5dc37c5..c5a2cd8da6 100644 --- a/typedapi/types/textembeddingresult.go +++ b/typedapi/types/textembeddingresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TextEmbeddingResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/inference/_types/Results.ts#L52-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/inference/_types/Results.ts#L53-L58 type TextEmbeddingResult struct { Embedding []float32 `json:"embedding"` } diff --git a/typedapi/types/textexpansioninferenceoptions.go b/typedapi/types/textexpansioninferenceoptions.go index dc2b70db21..f1c9200600 100644 --- a/typedapi/types/textexpansioninferenceoptions.go +++ b/typedapi/types/textexpansioninferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TextExpansionInferenceOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L247-L253 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L247-L253 type TextExpansionInferenceOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. diff --git a/typedapi/types/textexpansioninferenceupdateoptions.go b/typedapi/types/textexpansioninferenceupdateoptions.go index 6a50f711f4..4da5534f0c 100644 --- a/typedapi/types/textexpansioninferenceupdateoptions.go +++ b/typedapi/types/textexpansioninferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TextExpansionInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L398-L402 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L398-L402 type TextExpansionInferenceUpdateOptions struct { // ResultsField The field that is added to incoming documents to contain the inference // prediction. Defaults to predicted_value. diff --git a/typedapi/types/textexpansionquery.go b/typedapi/types/textexpansionquery.go index 91c24eee2b..0368671386 100644 --- a/typedapi/types/textexpansionquery.go +++ b/typedapi/types/textexpansionquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TextExpansionQuery type. 
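The inference result types above expose the raw vector as `Embedding []float32` (or `[]byte` for the byte variant). As an illustration of consuming such a result client-side, here is a small cosine-similarity sketch over two float32 embeddings; the sample vectors are fabricated and the helper is not part of the generated API.

```go
package main

import (
	"fmt"
	"math"
)

// cosine computes the cosine similarity of two equally sized embeddings,
// e.g. the Embedding fields of two TextEmbeddingResult values.
func cosine(a, b []float32) float64 {
	if len(a) != len(b) || len(a) == 0 {
		return 0
	}
	var dot, na, nb float64
	for i := range a {
		dot += float64(a[i]) * float64(b[i])
		na += float64(a[i]) * float64(a[i])
		nb += float64(b[i]) * float64(b[i])
	}
	return dot / (math.Sqrt(na) * math.Sqrt(nb))
}

func main() {
	x := []float32{0.1, 0.2, 0.3}
	y := []float32{0.1, 0.2, 0.25}
	fmt.Printf("similarity: %.4f\n", cosine(x, y))
}
```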
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/TextExpansionQuery.ts#L23-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/TextExpansionQuery.ts#L23-L33 type TextExpansionQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -64,7 +64,7 @@ func (s *TextExpansionQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/textindexprefixes.go b/typedapi/types/textindexprefixes.go index f2da398e82..2300eda999 100644 --- a/typedapi/types/textindexprefixes.go +++ b/typedapi/types/textindexprefixes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TextIndexPrefixes type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L250-L253 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L264-L267 type TextIndexPrefixes struct { MaxChars int `json:"max_chars"` MinChars int `json:"min_chars"` @@ -54,7 +54,7 @@ func (s *TextIndexPrefixes) UnmarshalJSON(data []byte) error { case "max_chars": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *TextIndexPrefixes) UnmarshalJSON(data []byte) error { case "min_chars": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/textproperty.go b/typedapi/types/textproperty.go index 140746a8f1..99b34a4ac2 100644 --- a/typedapi/types/textproperty.go +++ b/typedapi/types/textproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // TextProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L255-L271 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L269-L285 type TextProperty struct { Analyzer *string `json:"analyzer,omitempty"` Boost *Float64 `json:"boost,omitempty"` @@ -91,7 +91,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { s.Analyzer = &o case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -128,7 +128,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { } case "eager_global_ordinals": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -142,7 +142,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { } case "fielddata": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -167,7 +167,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -188,7 +188,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -278,12 +278,6 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -302,6 +296,18 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -458,6 +464,12 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -469,7 +481,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -484,7 +496,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -503,7 +515,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { } case "index_phrases": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -530,7 +542,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { } case "norms": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -545,7 +557,7 @@ func (s *TextProperty) 
UnmarshalJSON(data []byte) error { case "position_increment_gap": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -566,7 +578,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -587,7 +599,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -677,12 +689,6 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -701,6 +707,18 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -857,6 +875,12 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -903,7 +927,7 @@ func (s *TextProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/texttoanalyze.go b/typedapi/types/texttoanalyze.go index 7a9f04b3e5..5c4ed4e4a4 100644 --- a/typedapi/types/texttoanalyze.go +++ b/typedapi/types/texttoanalyze.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TextToAnalyze type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/analyze/types.ts#L66-L66 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/analyze/types.ts#L69-L69 type TextToAnalyze []string diff --git a/typedapi/types/threadcount.go b/typedapi/types/threadcount.go index 291bee27a3..4c052946a0 100644 --- a/typedapi/types/threadcount.go +++ b/typedapi/types/threadcount.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
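The `fields` and `properties` decoders in `TextProperty` (and in the other property types further down) gain `semantic_text` and `icu_collation_keyword` branches and now key dynamic mappings on `{dynamic_type}`. A much-reduced sketch of the underlying dispatch is shown below, with invented stand-in structs rather than the generated ones: each raw entry's `type` discriminator is inspected first, then the entry is decoded into the matching concrete property.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Invented stand-ins for two concrete property types.
type textField struct {
	Type     string `json:"type"`
	Analyzer string `json:"analyzer,omitempty"`
}

type keywordField struct {
	Type        string `json:"type"`
	IgnoreAbove int    `json:"ignore_above,omitempty"`
}

// decodeFields peeks at the "type" discriminator of each entry and routes
// it to a concrete struct, falling back to the raw message otherwise.
func decodeFields(data []byte) (map[string]any, error) {
	raw := map[string]json.RawMessage{}
	if err := json.Unmarshal(data, &raw); err != nil {
		return nil, err
	}
	out := make(map[string]any, len(raw))
	for name, msg := range raw {
		var kind struct {
			Type string `json:"type"`
		}
		if err := json.Unmarshal(msg, &kind); err != nil {
			return nil, err
		}
		switch kind.Type {
		case "text":
			var f textField
			if err := json.Unmarshal(msg, &f); err != nil {
				return nil, err
			}
			out[name] = f
		case "keyword":
			var f keywordField
			if err := json.Unmarshal(msg, &f); err != nil {
				return nil, err
			}
			out[name] = f
		default: // e.g. semantic_text, icu_collation_keyword, ...
			out[name] = msg
		}
	}
	return out, nil
}

func main() {
	res, err := decodeFields([]byte(`{"raw":{"type":"keyword","ignore_above":256}}`))
	fmt.Println(res, err)
}
```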
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ThreadCount type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L1004-L1029 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L1004-L1029 type ThreadCount struct { // Active Number of active threads in the thread pool. Active *int64 `json:"active,omitempty"` @@ -63,7 +63,7 @@ func (s *ThreadCount) UnmarshalJSON(data []byte) error { switch t { case "active": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -78,7 +78,7 @@ func (s *ThreadCount) UnmarshalJSON(data []byte) error { } case "completed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -93,7 +93,7 @@ func (s *ThreadCount) UnmarshalJSON(data []byte) error { } case "largest": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +108,7 @@ func (s *ThreadCount) UnmarshalJSON(data []byte) error { } case "queue": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -123,7 +123,7 @@ func (s *ThreadCount) UnmarshalJSON(data []byte) error { } case "rejected": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *ThreadCount) UnmarshalJSON(data []byte) error { } case "threads": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/threadpoolrecord.go b/typedapi/types/threadpoolrecord.go index fc046a85e4..a61b2bd7da 100644 --- a/typedapi/types/threadpoolrecord.go +++ b/typedapi/types/threadpoolrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // ThreadPoolRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/thread_pool/types.ts#L22-L124 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/thread_pool/types.ts#L22-L124 type ThreadPoolRecord struct { // Active The number of active threads in the current thread pool. Active *string `json:"active,omitempty"` // Completed The number of completed tasks. Completed *string `json:"completed,omitempty"` // Core The core number of active threads allowed in a scaling thread pool. - Core string `json:"core,omitempty"` + Core *string `json:"core,omitempty"` // EphemeralNodeId The ephemeral node identifier. EphemeralNodeId *string `json:"ephemeral_node_id,omitempty"` // Host The host name for the current node. @@ -46,11 +46,11 @@ type ThreadPoolRecord struct { // Ip The IP address for the current node. Ip *string `json:"ip,omitempty"` // KeepAlive The thread keep alive time. 
- KeepAlive string `json:"keep_alive,omitempty"` + KeepAlive *string `json:"keep_alive,omitempty"` // Largest The highest number of active threads in the current thread pool. Largest *string `json:"largest,omitempty"` // Max The maximum number of active threads allowed in a scaling thread pool. - Max string `json:"max,omitempty"` + Max *string `json:"max,omitempty"` // Name The thread pool name. Name *string `json:"name,omitempty"` // NodeId The persistent node identifier. @@ -70,7 +70,7 @@ type ThreadPoolRecord struct { // Rejected The number of rejected tasks. Rejected *string `json:"rejected,omitempty"` // Size The number of active threads allowed in a fixed thread pool. - Size string `json:"size,omitempty"` + Size *string `json:"size,omitempty"` // Type The thread pool type. // Returned values include `fixed`, `fixed_auto_queue_size`, `direct`, and // `scaling`. @@ -126,7 +126,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Core = o + s.Core = &o case "ephemeral_node_id", "eid": var tmp json.RawMessage @@ -174,7 +174,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.KeepAlive = o + s.KeepAlive = &o case "largest", "l": var tmp json.RawMessage @@ -198,7 +198,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Max = o + s.Max = &o case "name", "n": var tmp json.RawMessage @@ -311,7 +311,7 @@ func (s *ThreadPoolRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Size = o + s.Size = &o case "type", "t": var tmp json.RawMessage diff --git a/typedapi/types/throttlestate.go b/typedapi/types/throttlestate.go index e8cfadd42b..a15194f380 100644 --- a/typedapi/types/throttlestate.go +++ b/typedapi/types/throttlestate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ThrottleState type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Action.ts#L126-L129 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Action.ts#L126-L129 type ThrottleState struct { Reason string `json:"reason"` Timestamp DateTime `json:"timestamp"` diff --git a/typedapi/types/timeofmonth.go b/typedapi/types/timeofmonth.go index a4e1623ea5..343ca38026 100644 --- a/typedapi/types/timeofmonth.go +++ b/typedapi/types/timeofmonth.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TimeOfMonth type. 
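In `ThreadPoolRecord` the cat columns that are absent for some pool types (`core`, `keep_alive`, `max`, `size`) change from `string` to `*string`, so callers should nil-check before dereferencing. A hedged sketch with a local stand-in struct (not the generated type) and fabricated sample values:

```go
package main

import "fmt"

// threadPoolRecord is a local stand-in for the generated type; only the
// fields relevant to the pointer change are shown.
type threadPoolRecord struct {
	Name *string
	Size *string // only set for fixed pools
	Max  *string // only set for scaling pools
}

// orDash renders an optional column the way `_cat` output does.
func orDash(s *string) string {
	if s == nil {
		return "-"
	}
	return *s
}

func main() {
	name, max := "search", "13"
	rec := threadPoolRecord{Name: &name, Max: &max}
	fmt.Printf("%s size=%s max=%s\n", orDash(rec.Name), orDash(rec.Size), orDash(rec.Max))
}
```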
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Schedule.ts#L110-L113 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Schedule.ts#L110-L113 type TimeOfMonth struct { At []string `json:"at"` On []int `json:"on"` diff --git a/typedapi/types/timeofweek.go b/typedapi/types/timeofweek.go index a22f395fa4..f073205bc5 100644 --- a/typedapi/types/timeofweek.go +++ b/typedapi/types/timeofweek.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // TimeOfWeek type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Schedule.ts#L115-L118 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Schedule.ts#L115-L118 type TimeOfWeek struct { At []string `json:"at"` On []day.Day `json:"on"` diff --git a/typedapi/types/timeofyear.go b/typedapi/types/timeofyear.go index 977e399035..048d34c71c 100644 --- a/typedapi/types/timeofyear.go +++ b/typedapi/types/timeofyear.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // TimeOfYear type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Schedule.ts#L120-L124 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Schedule.ts#L120-L124 type TimeOfYear struct { At []string `json:"at"` Int []month.Month `json:"int"` diff --git a/typedapi/types/timesync.go b/typedapi/types/timesync.go index 291e9006bd..0cf73025d8 100644 --- a/typedapi/types/timesync.go +++ b/typedapi/types/timesync.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TimeSync type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/_types/Transform.ts#L177-L189 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/_types/Transform.ts#L177-L189 type TimeSync struct { // Delay The time delay between the current time and the latest input data time. Delay Duration `json:"delay,omitempty"` diff --git a/typedapi/types/timingstats.go b/typedapi/types/timingstats.go index f6461bdcb4..8e9cfb7e03 100644 --- a/typedapi/types/timingstats.go +++ b/typedapi/types/timingstats.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TimingStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L563-L568 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L563-L568 type TimingStats struct { // ElapsedTime Runtime of the analysis in milliseconds. ElapsedTime int64 `json:"elapsed_time"` diff --git a/typedapi/types/tokencountproperty.go b/typedapi/types/tokencountproperty.go index 107404728d..80efd5116d 100644 --- a/typedapi/types/tokencountproperty.go +++ b/typedapi/types/tokencountproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // TokenCountProperty type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/specialized.ts#L79-L86 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/specialized.ts#L85-L92 type TokenCountProperty struct { Analyzer *string `json:"analyzer,omitempty"` Boost *Float64 `json:"boost,omitempty"` @@ -81,7 +81,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { s.Analyzer = &o case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -132,7 +132,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { } case "enable_position_increments": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -152,7 +152,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -173,7 +173,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -263,12 +263,6 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -287,6 +281,18 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if 
err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -443,6 +449,12 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -454,7 +466,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -469,7 +481,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -491,7 +503,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { } case "null_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -513,7 +525,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -534,7 +546,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -624,12 +636,6 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -648,6 +654,18 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -804,6 +822,12 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -826,7 +850,7 @@ func (s *TokenCountProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/tokendetail.go b/typedapi/types/tokendetail.go index 500af73f83..ee3b43b5bb 100644 --- a/typedapi/types/tokendetail.go +++ b/typedapi/types/tokendetail.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TokenDetail type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/analyze/types.ts#L68-L71 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/analyze/types.ts#L71-L74 type TokenDetail struct { Name string `json:"name"` Tokens []ExplainAnalyzeToken `json:"tokens"` diff --git a/typedapi/types/tokenfilter.go b/typedapi/types/tokenfilter.go index 07d1ae86e8..8d4f906fb9 100644 --- a/typedapi/types/tokenfilter.go +++ b/typedapi/types/tokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // TokenFilterDefinition // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L344-L346 -type TokenFilter interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L346-L348 +type TokenFilter any diff --git a/typedapi/types/tokenfilterdefinition.go b/typedapi/types/tokenfilterdefinition.go index 64a7e3a903..c7ef8c8df9 100644 --- a/typedapi/types/tokenfilterdefinition.go +++ b/typedapi/types/tokenfilterdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -70,5 +70,5 @@ package types // PhoneticTokenFilter // DictionaryDecompounderTokenFilter // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L348-L400 -type TokenFilterDefinition interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L350-L402 +type TokenFilterDefinition any diff --git a/typedapi/types/tokenizationconfigcontainer.go b/typedapi/types/tokenizationconfigcontainer.go index 54f593ec5f..15376e297f 100644 --- a/typedapi/types/tokenizationconfigcontainer.go +++ b/typedapi/types/tokenizationconfigcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TokenizationConfigContainer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L110-L129 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L110-L129 type TokenizationConfigContainer struct { // Bert Indicates BERT tokenization and its options Bert *NlpBertTokenizationConfig `json:"bert,omitempty"` diff --git a/typedapi/types/tokenizer.go b/typedapi/types/tokenizer.go index 645d69793e..981b04029a 100644 --- a/typedapi/types/tokenizer.go +++ b/typedapi/types/tokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // string // TokenizerDefinition // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L120-L122 -type Tokenizer interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L120-L122 +type Tokenizer any diff --git a/typedapi/types/tokenizerdefinition.go b/typedapi/types/tokenizerdefinition.go index 80f9bd8f76..6b46dd9782 100644 --- a/typedapi/types/tokenizerdefinition.go +++ b/typedapi/types/tokenizerdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -37,5 +37,5 @@ package types // PatternTokenizer // IcuTokenizer // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L124-L142 -type TokenizerDefinition interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L124-L142 +type TokenizerDefinition any diff --git a/typedapi/types/tokenpruningconfig.go b/typedapi/types/tokenpruningconfig.go index 1fd5c70c83..7a24776756 100644 --- a/typedapi/types/tokenpruningconfig.go +++ b/typedapi/types/tokenpruningconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TokenPruningConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/TokenPruningConfig.ts#L22-L35 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/TokenPruningConfig.ts#L22-L35 type TokenPruningConfig struct { // OnlyScorePrunedTokens Whether to only score pruned tokens, vs only scoring kept tokens. 
OnlyScorePrunedTokens *bool `json:"only_score_pruned_tokens,omitempty"` @@ -60,7 +60,7 @@ func (s *TokenPruningConfig) UnmarshalJSON(data []byte) error { switch t { case "only_score_pruned_tokens": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *TokenPruningConfig) UnmarshalJSON(data []byte) error { case "tokens_freq_ratio_threshold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *TokenPruningConfig) UnmarshalJSON(data []byte) error { } case "tokens_weight_threshold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/topclassentry.go b/typedapi/types/topclassentry.go index 304fbe715d..f288fd564f 100644 --- a/typedapi/types/topclassentry.go +++ b/typedapi/types/topclassentry.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TopClassEntry type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L440-L444 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L440-L444 type TopClassEntry struct { ClassName string `json:"class_name"` ClassProbability Float64 `json:"class_probability"` @@ -66,7 +66,7 @@ func (s *TopClassEntry) UnmarshalJSON(data []byte) error { s.ClassName = o case "class_probability": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *TopClassEntry) UnmarshalJSON(data []byte) error { } case "class_score": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/tophit.go b/typedapi/types/tophit.go index 2c845f6d23..929e4778c0 100644 --- a/typedapi/types/tophit.go +++ b/typedapi/types/tophit.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TopHit type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/text_structure/find_structure/types.ts#L35-L38 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/text_structure/find_structure/types.ts#L35-L38 type TopHit struct { Count int64 `json:"count"` Value json.RawMessage `json:"value,omitempty"` @@ -53,7 +53,7 @@ func (s *TopHit) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/tophitsaggregate.go b/typedapi/types/tophitsaggregate.go index 8cab12b6e5..f56fb8b497 100644 --- a/typedapi/types/tophitsaggregate.go +++ b/typedapi/types/tophitsaggregate.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TopHitsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L654-L657 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L658-L661 type TopHitsAggregate struct { Hits HitsMetadata `json:"hits"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/tophitsaggregation.go b/typedapi/types/tophitsaggregation.go index 2b0eee794f..afed796a74 100644 --- a/typedapi/types/tophitsaggregation.go +++ b/typedapi/types/tophitsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,15 +31,18 @@ import ( // TopHitsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L337-L392 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L337-L397 type TopHitsAggregation struct { // DocvalueFields Fields for which to return doc values. - DocvalueFields []string `json:"docvalue_fields,omitempty"` + DocvalueFields []FieldAndFormat `json:"docvalue_fields,omitempty"` // Explain If `true`, returns detailed information about score computation as part of a // hit. Explain *bool `json:"explain,omitempty"` // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` + // Fields Array of wildcard (*) patterns. The request returns values for field names + // matching these patterns in the hits.fields property of the response. + Fields []FieldAndFormat `json:"fields,omitempty"` // From Starting document offset. 
From *int `json:"from,omitempty"` // Highlight Specifies the highlighter to use for retrieving highlighted snippets from one @@ -87,23 +90,12 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { switch t { case "docvalue_fields": - rawMsg := json.RawMessage{} - dec.Decode(&rawMsg) - if !bytes.HasPrefix(rawMsg, []byte("[")) { - o := new(string) - if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&o); err != nil { - return fmt.Errorf("%s | %w", "DocvalueFields", err) - } - - s.DocvalueFields = append(s.DocvalueFields, *o) - } else { - if err := json.NewDecoder(bytes.NewReader(rawMsg)).Decode(&s.DocvalueFields); err != nil { - return fmt.Errorf("%s | %w", "DocvalueFields", err) - } + if err := dec.Decode(&s.DocvalueFields); err != nil { + return fmt.Errorf("%s | %w", "DocvalueFields", err) } case "explain": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -121,9 +113,14 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { return fmt.Errorf("%s | %w", "Field", err) } + case "fields": + if err := dec.Decode(&s.Fields); err != nil { + return fmt.Errorf("%s | %w", "Fields", err) + } + case "from": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -192,7 +189,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { } case "seq_no_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -207,7 +204,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -259,7 +256,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { } case "track_scores": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -273,7 +270,7 @@ func (s *TopHitsAggregation) UnmarshalJSON(data []byte) error { } case "version": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/topleftbottomrightgeobounds.go b/typedapi/types/topleftbottomrightgeobounds.go index 69783fc1fb..0ab55d25c9 100644 --- a/typedapi/types/topleftbottomrightgeobounds.go +++ b/typedapi/types/topleftbottomrightgeobounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TopLeftBottomRightGeoBounds type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L161-L164 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L161-L164 type TopLeftBottomRightGeoBounds struct { BottomRight GeoLocation `json:"bottom_right"` TopLeft GeoLocation `json:"top_left"` diff --git a/typedapi/types/topmetrics.go b/typedapi/types/topmetrics.go index e8d0f8cb85..b1d4b4376f 100644 --- a/typedapi/types/topmetrics.go +++ b/typedapi/types/topmetrics.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TopMetrics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L729-L733 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L736-L740 type TopMetrics struct { Metrics map[string]FieldValue `json:"metrics"` Sort []FieldValue `json:"sort"` diff --git a/typedapi/types/topmetricsaggregate.go b/typedapi/types/topmetricsaggregate.go index 1707158802..318db5a5ee 100644 --- a/typedapi/types/topmetricsaggregate.go +++ b/typedapi/types/topmetricsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TopMetricsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L724-L727 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L731-L734 type TopMetricsAggregate struct { Meta Metadata `json:"meta,omitempty"` Top []TopMetrics `json:"top"` diff --git a/typedapi/types/topmetricsaggregation.go b/typedapi/types/topmetricsaggregation.go index 47a60df5cc..3a9aa7a632 100644 --- a/typedapi/types/topmetricsaggregation.go +++ b/typedapi/types/topmetricsaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TopMetricsAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L394-L408 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L399-L413 type TopMetricsAggregation struct { // Field The field on which to run the aggregation. Field *string `json:"field,omitempty"` @@ -126,7 +126,7 @@ func (s *TopMetricsAggregation) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/topmetricsvalue.go b/typedapi/types/topmetricsvalue.go index b70b5a43f0..3f9a8f3fd1 100644 --- a/typedapi/types/topmetricsvalue.go +++ b/typedapi/types/topmetricsvalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TopMetricsValue type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L410-L415 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L415-L420 type TopMetricsValue struct { // Field A field to return as a metric. Field string `json:"field"` diff --git a/typedapi/types/toprightbottomleftgeobounds.go b/typedapi/types/toprightbottomleftgeobounds.go index 5a8e7e4451..d09ba2555f 100644 --- a/typedapi/types/toprightbottomleftgeobounds.go +++ b/typedapi/types/toprightbottomleftgeobounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TopRightBottomLeftGeoBounds type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L166-L169 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L166-L169 type TopRightBottomLeftGeoBounds struct { BottomLeft GeoLocation `json:"bottom_left"` TopRight GeoLocation `json:"top_right"` diff --git a/typedapi/types/totalfeatureimportance.go b/typedapi/types/totalfeatureimportance.go index dedb47c869..910bc73f71 100644 --- a/typedapi/types/totalfeatureimportance.go +++ b/typedapi/types/totalfeatureimportance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TotalFeatureImportance type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L233-L240 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L233-L240 type TotalFeatureImportance struct { // Classes If the trained model is a classification model, feature importance statistics // are gathered per target class value. diff --git a/typedapi/types/totalfeatureimportanceclass.go b/typedapi/types/totalfeatureimportanceclass.go index 94827754a8..1b906a222d 100644 --- a/typedapi/types/totalfeatureimportanceclass.go +++ b/typedapi/types/totalfeatureimportanceclass.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TotalFeatureImportanceClass type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L242-L247 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L242-L247 type TotalFeatureImportanceClass struct { // ClassName The target class value. Could be a string, boolean, or number. ClassName string `json:"class_name"` diff --git a/typedapi/types/totalfeatureimportancestatistics.go b/typedapi/types/totalfeatureimportancestatistics.go index ae9926b684..9edcee420f 100644 --- a/typedapi/types/totalfeatureimportancestatistics.go +++ b/typedapi/types/totalfeatureimportancestatistics.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TotalFeatureImportanceStatistics type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L249-L256 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L249-L256 type TotalFeatureImportanceStatistics struct { // Max The maximum importance value across all the training data for this feature. Max int `json:"max"` @@ -60,7 +60,7 @@ func (s *TotalFeatureImportanceStatistics) UnmarshalJSON(data []byte) error { case "max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *TotalFeatureImportanceStatistics) UnmarshalJSON(data []byte) error { } case "mean_magnitude": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *TotalFeatureImportanceStatistics) UnmarshalJSON(data []byte) error { case "min": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/totaluserprofiles.go b/typedapi/types/totaluserprofiles.go index d4d3314875..e4a338b674 100644 --- a/typedapi/types/totaluserprofiles.go +++ b/typedapi/types/totaluserprofiles.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TotalUserProfiles type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/suggest_user_profiles/Response.ts#L24-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/suggest_user_profiles/Response.ts#L24-L27 type TotalUserProfiles struct { Relation string `json:"relation"` Value int64 `json:"value"` @@ -58,7 +58,7 @@ func (s *TotalUserProfiles) UnmarshalJSON(data []byte) error { } case "value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trackhits.go b/typedapi/types/trackhits.go index 040686cd42..22c41ddd5e 100644 --- a/typedapi/types/trackhits.go +++ b/typedapi/types/trackhits.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // bool // int // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/search/_types/hits.ts#L142-L150 -type TrackHits interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/search/_types/hits.ts#L142-L150 +type TrackHits any diff --git a/typedapi/types/trainedmodel.go b/typedapi/types/trainedmodel.go index 0773109b13..40c15334ec 100644 --- a/typedapi/types/trainedmodel.go +++ b/typedapi/types/trainedmodel.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TrainedModel type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/types.ts#L60-L72 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/types.ts#L60-L72 type TrainedModel struct { // Ensemble The definition for an ensemble model Ensemble *Ensemble `json:"ensemble,omitempty"` diff --git a/typedapi/types/trainedmodelassignment.go b/typedapi/types/trainedmodelassignment.go index d38952c4bd..b92b018312 100644 --- a/typedapi/types/trainedmodelassignment.go +++ b/typedapi/types/trainedmodelassignment.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // TrainedModelAssignment type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L403-L418 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L403-L418 type TrainedModelAssignment struct { // AssignmentState The overall assignment state. 
AssignmentState deploymentassignmentstate.DeploymentAssignmentState `json:"assignment_state"` @@ -67,7 +67,7 @@ func (s *TrainedModelAssignment) UnmarshalJSON(data []byte) error { case "max_assigned_allocations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodelassignmentroutingtable.go b/typedapi/types/trainedmodelassignmentroutingtable.go index ad723b27ca..b7a8043f27 100644 --- a/typedapi/types/trainedmodelassignmentroutingtable.go +++ b/typedapi/types/trainedmodelassignmentroutingtable.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // TrainedModelAssignmentRoutingTable type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L374-L392 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L374-L392 type TrainedModelAssignmentRoutingTable struct { // CurrentAllocations Current number of allocations. CurrentAllocations int `json:"current_allocations"` @@ -63,7 +63,7 @@ func (s *TrainedModelAssignmentRoutingTable) UnmarshalJSON(data []byte) error { case "current_allocations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *TrainedModelAssignmentRoutingTable) UnmarshalJSON(data []byte) error { case "target_allocations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodelassignmenttaskparameters.go b/typedapi/types/trainedmodelassignmenttaskparameters.go index 9170271209..696e1aff99 100644 --- a/typedapi/types/trainedmodelassignmenttaskparameters.go +++ b/typedapi/types/trainedmodelassignmenttaskparameters.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // TrainedModelAssignmentTaskParameters type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L316-L349 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L316-L349 type TrainedModelAssignmentTaskParameters struct { // CacheSize The size of the trained model cache. 
CacheSize ByteSize `json:"cache_size"` @@ -79,7 +79,7 @@ func (s *TrainedModelAssignmentTaskParameters) UnmarshalJSON(data []byte) error case "model_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *TrainedModelAssignmentTaskParameters) UnmarshalJSON(data []byte) error case "number_of_allocations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -121,7 +121,7 @@ func (s *TrainedModelAssignmentTaskParameters) UnmarshalJSON(data []byte) error case "queue_capacity": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -137,7 +137,7 @@ func (s *TrainedModelAssignmentTaskParameters) UnmarshalJSON(data []byte) error case "threads_per_allocation": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodelconfig.go b/typedapi/types/trainedmodelconfig.go index 001e1ba33c..e34755704d 100644 --- a/typedapi/types/trainedmodelconfig.go +++ b/typedapi/types/trainedmodelconfig.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // TrainedModelConfig type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L165-L200 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L165-L200 type TrainedModelConfig struct { CompressedDefinition *string `json:"compressed_definition,omitempty"` // CreateTime The time when the trained model was created. @@ -142,7 +142,7 @@ func (s *TrainedModelConfig) UnmarshalJSON(data []byte) error { case "estimated_heap_memory_usage_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -158,7 +158,7 @@ func (s *TrainedModelConfig) UnmarshalJSON(data []byte) error { case "estimated_operations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -173,7 +173,7 @@ func (s *TrainedModelConfig) UnmarshalJSON(data []byte) error { } case "fully_defined": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodelconfiginput.go b/typedapi/types/trainedmodelconfiginput.go index 46e2584490..09d070b002 100644 --- a/typedapi/types/trainedmodelconfiginput.go +++ b/typedapi/types/trainedmodelconfiginput.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TrainedModelConfigInput type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L202-L205 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L202-L205 type TrainedModelConfigInput struct { // FieldNames An array of input field names for the model. FieldNames []string `json:"field_names"` diff --git a/typedapi/types/trainedmodelconfigmetadata.go b/typedapi/types/trainedmodelconfigmetadata.go index 284e787f38..114b806183 100644 --- a/typedapi/types/trainedmodelconfigmetadata.go +++ b/typedapi/types/trainedmodelconfigmetadata.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TrainedModelConfigMetadata type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L207-L215 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L207-L215 type TrainedModelConfigMetadata struct { // FeatureImportanceBaseline An object that contains the baseline for feature importance values. For // regression analysis, it is a single value. For classification analysis, there diff --git a/typedapi/types/trainedmodeldeploymentallocationstatus.go b/typedapi/types/trainedmodeldeploymentallocationstatus.go index 542dd0ffb2..aeff241398 100644 --- a/typedapi/types/trainedmodeldeploymentallocationstatus.go +++ b/typedapi/types/trainedmodeldeploymentallocationstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // TrainedModelDeploymentAllocationStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L394-L401 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L394-L401 type TrainedModelDeploymentAllocationStatus struct { // AllocationCount The current number of nodes where the model is allocated. AllocationCount int `json:"allocation_count"` @@ -60,7 +60,7 @@ func (s *TrainedModelDeploymentAllocationStatus) UnmarshalJSON(data []byte) erro case "allocation_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -81,7 +81,7 @@ func (s *TrainedModelDeploymentAllocationStatus) UnmarshalJSON(data []byte) erro case "target_allocation_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodeldeploymentnodesstats.go b/typedapi/types/trainedmodeldeploymentnodesstats.go index 463e3ff0d4..1db4862a3e 100644 --- a/typedapi/types/trainedmodeldeploymentnodesstats.go +++ b/typedapi/types/trainedmodeldeploymentnodesstats.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TrainedModelDeploymentNodesStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L133-L163 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L133-L163 type TrainedModelDeploymentNodesStats struct { // AverageInferenceTimeMs The average time for each inference call to complete on this node. AverageInferenceTimeMs Float64 `json:"average_inference_time_ms"` @@ -83,7 +83,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case "error_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,7 +99,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case "inference_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { } case "last_access": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -135,7 +135,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case "number_of_allocations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -151,7 +151,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case "number_of_pending_requests": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -167,7 +167,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case "rejection_execution_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -193,7 +193,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case "threads_per_allocation": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -209,7 +209,7 @@ func (s *TrainedModelDeploymentNodesStats) UnmarshalJSON(data []byte) error { case "timeout_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodeldeploymentstats.go b/typedapi/types/trainedmodeldeploymentstats.go index 1564fae337..1f6e3fbe09 100644 --- a/typedapi/types/trainedmodeldeploymentstats.go +++ b/typedapi/types/trainedmodeldeploymentstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // TrainedModelDeploymentStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L62-L102 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L62-L102 type TrainedModelDeploymentStats struct { // AllocationStatus The detailed allocation status for the deployment. AllocationStatus TrainedModelDeploymentAllocationStatus `json:"allocation_status"` @@ -104,7 +104,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case "error_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -120,7 +120,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case "inference_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -146,7 +146,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case "number_of_allocations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -162,7 +162,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case "queue_capacity": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -190,7 +190,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case "rejected_execution_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -216,7 +216,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case "threads_per_allocation": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -232,7 +232,7 @@ func (s *TrainedModelDeploymentStats) UnmarshalJSON(data []byte) error { case "timeout_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodelentities.go b/typedapi/types/trainedmodelentities.go index ebb54484b7..a4808e7104 100644 --- a/typedapi/types/trainedmodelentities.go +++ b/typedapi/types/trainedmodelentities.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TrainedModelEntities type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L433-L439 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L433-L439 type TrainedModelEntities struct { ClassName string `json:"class_name"` ClassProbability Float64 `json:"class_probability"` @@ -68,7 +68,7 @@ func (s *TrainedModelEntities) UnmarshalJSON(data []byte) error { s.ClassName = o case "class_probability": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *TrainedModelEntities) UnmarshalJSON(data []byte) error { case "end_pos": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *TrainedModelEntities) UnmarshalJSON(data []byte) error { case "start_pos": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodelinferenceclassimportance.go b/typedapi/types/trainedmodelinferenceclassimportance.go index a716a546e5..f6848b00d4 100644 --- a/typedapi/types/trainedmodelinferenceclassimportance.go +++ b/typedapi/types/trainedmodelinferenceclassimportance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TrainedModelInferenceClassImportance type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L446-L449 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L446-L449 type TrainedModelInferenceClassImportance struct { ClassName string `json:"class_name"` Importance Float64 `json:"importance"` @@ -65,7 +65,7 @@ func (s *TrainedModelInferenceClassImportance) UnmarshalJSON(data []byte) error s.ClassName = o case "importance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodelinferencefeatureimportance.go b/typedapi/types/trainedmodelinferencefeatureimportance.go index 4ffee26b6a..afcfa11402 100644 --- a/typedapi/types/trainedmodelinferencefeatureimportance.go +++ b/typedapi/types/trainedmodelinferencefeatureimportance.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TrainedModelInferenceFeatureImportance type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L451-L455 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L451-L455 type TrainedModelInferenceFeatureImportance struct { Classes []TrainedModelInferenceClassImportance `json:"classes,omitempty"` FeatureName string `json:"feature_name"` @@ -71,7 +71,7 @@ func (s *TrainedModelInferenceFeatureImportance) UnmarshalJSON(data []byte) erro s.FeatureName = o case "importance": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodelinferencestats.go b/typedapi/types/trainedmodelinferencestats.go index e3e71ec93b..1439f15772 100644 --- a/typedapi/types/trainedmodelinferencestats.go +++ b/typedapi/types/trainedmodelinferencestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TrainedModelInferenceStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L104-L124 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L104-L124 type TrainedModelInferenceStats struct { // CacheMissCount The number of times the model was loaded for inference and was not retrieved // from the cache. @@ -69,7 +69,7 @@ func (s *TrainedModelInferenceStats) UnmarshalJSON(data []byte) error { case "cache_miss_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +85,7 @@ func (s *TrainedModelInferenceStats) UnmarshalJSON(data []byte) error { case "failure_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -101,7 +101,7 @@ func (s *TrainedModelInferenceStats) UnmarshalJSON(data []byte) error { case "inference_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +117,7 @@ func (s *TrainedModelInferenceStats) UnmarshalJSON(data []byte) error { case "missing_all_fields_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodellocation.go b/typedapi/types/trainedmodellocation.go index 7de270ebbf..3e3c2fff50 100644 --- a/typedapi/types/trainedmodellocation.go +++ b/typedapi/types/trainedmodellocation.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TrainedModelLocation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L420-L422 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L420-L422 type TrainedModelLocation struct { Index TrainedModelLocationIndex `json:"index"` } diff --git a/typedapi/types/trainedmodellocationindex.go b/typedapi/types/trainedmodellocationindex.go index 01073c692c..418ce7d69c 100644 --- a/typedapi/types/trainedmodellocationindex.go +++ b/typedapi/types/trainedmodellocationindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TrainedModelLocationIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L424-L426 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L424-L426 type TrainedModelLocationIndex struct { Name string `json:"name"` } diff --git a/typedapi/types/trainedmodelprefixstrings.go b/typedapi/types/trainedmodelprefixstrings.go index e9dbe7a713..c8c8c02f35 100644 --- a/typedapi/types/trainedmodelprefixstrings.go +++ b/typedapi/types/trainedmodelprefixstrings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TrainedModelPrefixStrings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L428-L437 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L428-L437 type TrainedModelPrefixStrings struct { // Ingest String prepended to input at ingest Ingest *string `json:"ingest,omitempty"` diff --git a/typedapi/types/trainedmodelsizestats.go b/typedapi/types/trainedmodelsizestats.go index 4b0f395936..4f582fb32f 100644 --- a/typedapi/types/trainedmodelsizestats.go +++ b/typedapi/types/trainedmodelsizestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TrainedModelSizeStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L126-L131 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L126-L131 type TrainedModelSizeStats struct { // ModelSizeBytes The size of the model in bytes. 
ModelSizeBytes ByteSize `json:"model_size_bytes"` @@ -61,7 +61,7 @@ func (s *TrainedModelSizeStats) UnmarshalJSON(data []byte) error { case "required_native_memory_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodelsrecord.go b/typedapi/types/trainedmodelsrecord.go index 3ebbcfc20c..a9fc1707ad 100644 --- a/typedapi/types/trainedmodelsrecord.go +++ b/typedapi/types/trainedmodelsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TrainedModelsRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/ml_trained_models/types.ts#L23-L115 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/ml_trained_models/types.ts#L23-L115 type TrainedModelsRecord struct { // CreateTime The time the model was created. CreateTime DateTime `json:"create_time,omitempty"` diff --git a/typedapi/types/trainedmodelstats.go b/typedapi/types/trainedmodelstats.go index b43648564c..751bf34d72 100644 --- a/typedapi/types/trainedmodelstats.go +++ b/typedapi/types/trainedmodelstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TrainedModelStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/TrainedModel.ts#L42-L60 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/TrainedModel.ts#L42-L60 type TrainedModelStats struct { // DeploymentStats A collection of deployment stats, which is present when the models are // deployed. @@ -95,7 +95,7 @@ func (s *TrainedModelStats) UnmarshalJSON(data []byte) error { case "pipeline_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trainedmodeltree.go b/typedapi/types/trainedmodeltree.go index 8a6ed5902a..47a19780d9 100644 --- a/typedapi/types/trainedmodeltree.go +++ b/typedapi/types/trainedmodeltree.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TrainedModelTree type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/types.ts#L74-L79 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/types.ts#L74-L79 type TrainedModelTree struct { ClassificationLabels []string `json:"classification_labels,omitempty"` FeatureNames []string `json:"feature_names"` diff --git a/typedapi/types/trainedmodeltreenode.go b/typedapi/types/trainedmodeltreenode.go index 486cd6e260..237f37b672 100644 --- a/typedapi/types/trainedmodeltreenode.go +++ b/typedapi/types/trainedmodeltreenode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TrainedModelTreeNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/types.ts#L81-L91 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/types.ts#L81-L91 type TrainedModelTreeNode struct { DecisionType *string `json:"decision_type,omitempty"` DefaultLeft *bool `json:"default_left,omitempty"` @@ -72,7 +72,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { s.DecisionType = &o case "default_left": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -86,7 +86,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { } case "leaf_value": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,7 +103,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case "left_child": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -119,7 +119,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case "node_index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -135,7 +135,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case "right_child": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -151,7 +151,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case "split_feature": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -167,7 +167,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { case "split_gain": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -182,7 +182,7 @@ func (s *TrainedModelTreeNode) UnmarshalJSON(data []byte) error { } case "threshold": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/transformauthorization.go b/typedapi/types/transformauthorization.go index 0260fd654a..c84b62d38b 100644 --- a/typedapi/types/transformauthorization.go +++ b/typedapi/types/transformauthorization.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TransformAuthorization type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/Authorization.ts#L59-L71 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/Authorization.ts#L59-L71 type TransformAuthorization struct { // ApiKey If an API key was used for the most recent update to the transform, its name // and identifier are listed in the response. diff --git a/typedapi/types/transformcontainer.go b/typedapi/types/transformcontainer.go index 539d1c4aea..c216086d44 100644 --- a/typedapi/types/transformcontainer.go +++ b/typedapi/types/transformcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TransformContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Transform.ts#L27-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Transform.ts#L27-L34 type TransformContainer struct { Chain []TransformContainer `json:"chain,omitempty"` Script *ScriptTransform `json:"script,omitempty"` diff --git a/typedapi/types/transformdestination.go b/typedapi/types/transformdestination.go index 4ec0cb0149..27fc48125c 100644 --- a/typedapi/types/transformdestination.go +++ b/typedapi/types/transformdestination.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TransformDestination type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/_types/Transform.ts#L34-L45 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/_types/Transform.ts#L34-L45 type TransformDestination struct { // Index The destination index for the transform. The mappings of the destination // index are deduced based on the source diff --git a/typedapi/types/transformindexerstats.go b/typedapi/types/transformindexerstats.go index c1f42f9da0..3212e4d67f 100644 --- a/typedapi/types/transformindexerstats.go +++ b/typedapi/types/transformindexerstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TransformIndexerStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/get_transform_stats/types.ts#L56-L74 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/get_transform_stats/types.ts#L56-L74 type TransformIndexerStats struct { DeleteTimeInMs *int64 `json:"delete_time_in_ms,omitempty"` DocumentsDeleted *int64 `json:"documents_deleted,omitempty"` @@ -73,7 +73,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { } case "documents_deleted": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { } case "documents_indexed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,7 +103,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { } case "documents_processed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -123,7 +123,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { } case "exponential_avg_documents_indexed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -139,7 +139,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { } case "exponential_avg_documents_processed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -155,7 +155,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { } case "index_failures": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -175,7 +175,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { } case "index_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -190,7 +190,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { } case "pages_processed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -210,7 +210,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { } case "processing_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -225,7 +225,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { } case "search_failures": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -245,7 +245,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { } case "search_total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -260,7 +260,7 @@ func (s *TransformIndexerStats) UnmarshalJSON(data []byte) error { } case "trigger_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/transformprogress.go b/typedapi/types/transformprogress.go index dc6361b244..f5cfea82a9 100644 --- a/typedapi/types/transformprogress.go +++ b/typedapi/types/transformprogress.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TransformProgress type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/get_transform_stats/types.ts#L48-L54 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/get_transform_stats/types.ts#L48-L54 type TransformProgress struct { DocsIndexed int64 `json:"docs_indexed"` DocsProcessed int64 `json:"docs_processed"` @@ -56,7 +56,7 @@ func (s *TransformProgress) UnmarshalJSON(data []byte) error { switch t { case "docs_indexed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *TransformProgress) UnmarshalJSON(data []byte) error { } case "docs_processed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -86,7 +86,7 @@ func (s *TransformProgress) UnmarshalJSON(data []byte) error { } case "docs_remaining": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -101,7 +101,7 @@ func (s *TransformProgress) UnmarshalJSON(data []byte) error { } case "percent_complete": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +117,7 @@ func (s *TransformProgress) UnmarshalJSON(data []byte) error { } case "total_docs": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/transformsource.go b/typedapi/types/transformsource.go index b6eb84a30a..9905956b61 100644 --- a/typedapi/types/transformsource.go +++ b/typedapi/types/transformsource.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TransformSource type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/_types/Transform.ts#L146-L165 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/_types/Transform.ts#L146-L165 type TransformSource struct { // Index The source indices for the transform. It can be a single index, an index // pattern (for example, `"my-index-*""`), an diff --git a/typedapi/types/transformsrecord.go b/typedapi/types/transformsrecord.go index 4921c7f113..f15be86640 100644 --- a/typedapi/types/transformsrecord.go +++ b/typedapi/types/transformsrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,17 +31,17 @@ import ( // TransformsRecord type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cat/transforms/types.ts#L22-L197 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cat/transforms/types.ts#L22-L197 type TransformsRecord struct { // ChangesLastDetectionTime The timestamp when changes were last detected in the source indices. - ChangesLastDetectionTime string `json:"changes_last_detection_time,omitempty"` + ChangesLastDetectionTime *string `json:"changes_last_detection_time,omitempty"` // Checkpoint The sequence number for the checkpoint. Checkpoint *string `json:"checkpoint,omitempty"` // CheckpointDurationTimeExpAvg The exponential moving average of the duration of the checkpoint, in // milliseconds. CheckpointDurationTimeExpAvg *string `json:"checkpoint_duration_time_exp_avg,omitempty"` // CheckpointProgress The progress of the next checkpoint that is currently in progress. - CheckpointProgress string `json:"checkpoint_progress,omitempty"` + CheckpointProgress *string `json:"checkpoint_progress,omitempty"` // CreateTime The time the transform was created. CreateTime *string `json:"create_time,omitempty"` // DeleteTime The total time spent deleting documents, in milliseconds. @@ -77,7 +77,7 @@ type TransformsRecord struct { IndexedDocumentsExpAvg *string `json:"indexed_documents_exp_avg,omitempty"` // LastSearchTime The timestamp of the last search in the source indices. // This field is shown only if the transform is running. - LastSearchTime string `json:"last_search_time,omitempty"` + LastSearchTime *string `json:"last_search_time,omitempty"` // MaxPageSearchSize The initial page size that is used for the composite aggregation for each // checkpoint. MaxPageSearchSize *string `json:"max_page_search_size,omitempty"` @@ -150,7 +150,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.ChangesLastDetectionTime = o + s.ChangesLastDetectionTime = &o case "checkpoint", "c": var tmp json.RawMessage @@ -186,7 +186,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.CheckpointProgress = o + s.CheckpointProgress = &o case "create_time", "ct", "createTime": var tmp json.RawMessage @@ -359,7 +359,7 @@ func (s *TransformsRecord) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.LastSearchTime = o + s.LastSearchTime = &o case "max_page_search_size", "mpsz": var tmp json.RawMessage diff --git a/typedapi/types/transformstats.go b/typedapi/types/transformstats.go index 8136365551..c61fb86be6 100644 --- a/typedapi/types/transformstats.go +++ b/typedapi/types/transformstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TransformStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/get_transform_stats/types.ts#L31-L42 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/get_transform_stats/types.ts#L31-L42 type TransformStats struct { Checkpointing Checkpointing `json:"checkpointing"` Health *TransformStatsHealth `json:"health,omitempty"` diff --git a/typedapi/types/transformstatshealth.go b/typedapi/types/transformstatshealth.go index d3f720ec67..a87cf477f0 100644 --- a/typedapi/types/transformstatshealth.go +++ b/typedapi/types/transformstatshealth.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // TransformStatsHealth type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/get_transform_stats/types.ts#L44-L46 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/get_transform_stats/types.ts#L44-L46 type TransformStatsHealth struct { Status healthstatus.HealthStatus `json:"status"` } diff --git a/typedapi/types/transformsummary.go b/typedapi/types/transformsummary.go index b033d56592..276001965d 100644 --- a/typedapi/types/transformsummary.go +++ b/typedapi/types/transformsummary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TransformSummary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/transform/get_transform/types.ts#L33-L61 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/transform/get_transform/types.ts#L33-L61 type TransformSummary struct { // Authorization The security privileges that the transform uses to run its queries. If // Elastic Stack security features were disabled at the time of the most recent diff --git a/typedapi/types/translog.go b/typedapi/types/translog.go index d7e51a17b1..a1ff6455f7 100644 --- a/typedapi/types/translog.go +++ b/typedapi/types/translog.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // Translog type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L339-L361 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L341-L363 type Translog struct { // Durability Whether or not to `fsync` and commit the translog after every index, delete, // update, or bulk request. diff --git a/typedapi/types/translogretention.go b/typedapi/types/translogretention.go index 948bf3bcf3..bdc2d9397d 100644 --- a/typedapi/types/translogretention.go +++ b/typedapi/types/translogretention.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TranslogRetention type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/_types/IndexSettings.ts#L380-L399 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/_types/IndexSettings.ts#L382-L401 type TranslogRetention struct { // Age This controls the maximum duration for which translog files are kept by each // shard. Keeping more diff --git a/typedapi/types/translogstats.go b/typedapi/types/translogstats.go index de3c3821b2..b1c2a4adc8 100644 --- a/typedapi/types/translogstats.go +++ b/typedapi/types/translogstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TranslogStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L397-L405 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L397-L405 type TranslogStats struct { EarliestLastModifiedAge int64 `json:"earliest_last_modified_age"` Operations int64 `json:"operations"` @@ -58,7 +58,7 @@ func (s *TranslogStats) UnmarshalJSON(data []byte) error { switch t { case "earliest_last_modified_age": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -73,7 +73,7 @@ func (s *TranslogStats) UnmarshalJSON(data []byte) error { } case "operations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -100,7 +100,7 @@ func (s *TranslogStats) UnmarshalJSON(data []byte) error { s.Size = &o case "size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -116,7 +116,7 @@ func (s *TranslogStats) UnmarshalJSON(data []byte) error { case "uncommitted_operations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -143,7 +143,7 @@ func (s *TranslogStats) UnmarshalJSON(data []byte) error { s.UncommittedSize = &o case "uncommitted_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/translogstatus.go b/typedapi/types/translogstatus.go index b2ab75cbff..35ce1c61dd 100644 --- a/typedapi/types/translogstatus.go +++ b/typedapi/types/translogstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TranslogStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/recovery/types.ts#L102-L109 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/recovery/types.ts#L102-L109 type TranslogStatus struct { Percent Percentage `json:"percent"` Recovered int64 `json:"recovered"` @@ -62,7 +62,7 @@ func (s *TranslogStatus) UnmarshalJSON(data []byte) error { } case "recovered": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -77,7 +77,7 @@ func (s *TranslogStatus) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -92,7 +92,7 @@ func (s *TranslogStatus) UnmarshalJSON(data []byte) error { } case "total_on_start": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/transport.go b/typedapi/types/transport.go index 5185051bea..82fc2f0f11 100644 --- a/typedapi/types/transport.go +++ b/typedapi/types/transport.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Transport type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L1047-L1090 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L1047-L1090 type Transport struct { // InboundHandlingTimeHistogram The distribution of the time spent handling each inbound message on a // transport thread, represented as a histogram. @@ -94,7 +94,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { } case "rx_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -121,7 +121,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { s.RxSize = &o case "rx_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -137,7 +137,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { case "server_open": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -152,7 +152,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { } case "total_outbound_connections": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -167,7 +167,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { } case "tx_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -194,7 +194,7 @@ func (s *Transport) UnmarshalJSON(data []byte) error { s.TxSize = &o case "tx_size_in_bytes": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/transporthistogram.go b/typedapi/types/transporthistogram.go index 043072df11..7543e12b98 100644 --- a/typedapi/types/transporthistogram.go +++ b/typedapi/types/transporthistogram.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TransportHistogram type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/nodes/_types/Stats.ts#L1092-L1106 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/nodes/_types/Stats.ts#L1092-L1106 type TransportHistogram struct { // Count The number of times a transport thread took a period of time within the // bounds of this bucket to handle an inbound message. 
@@ -60,7 +60,7 @@ func (s *TransportHistogram) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *TransportHistogram) UnmarshalJSON(data []byte) error { } case "ge_millis": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -90,7 +90,7 @@ func (s *TransportHistogram) UnmarshalJSON(data []byte) error { } case "lt_millis": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/triggercontainer.go b/typedapi/types/triggercontainer.go index 2186e5c649..dbe722fe1d 100644 --- a/typedapi/types/triggercontainer.go +++ b/typedapi/types/triggercontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TriggerContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Trigger.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Trigger.ts#L23-L28 type TriggerContainer struct { Schedule *ScheduleContainer `json:"schedule,omitempty"` } diff --git a/typedapi/types/triggereventcontainer.go b/typedapi/types/triggereventcontainer.go index 3d154cfe61..52f677a6f7 100644 --- a/typedapi/types/triggereventcontainer.go +++ b/typedapi/types/triggereventcontainer.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TriggerEventContainer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Trigger.ts#L32-L37 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Trigger.ts#L32-L37 type TriggerEventContainer struct { Schedule *ScheduleTriggerEvent `json:"schedule,omitempty"` } diff --git a/typedapi/types/triggereventresult.go b/typedapi/types/triggereventresult.go index 8a776c8078..9514b58760 100644 --- a/typedapi/types/triggereventresult.go +++ b/typedapi/types/triggereventresult.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TriggerEventResult type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Trigger.ts#L39-L43 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Trigger.ts#L39-L43 type TriggerEventResult struct { Manual TriggerEventContainer `json:"manual"` TriggeredTime DateTime `json:"triggered_time"` diff --git a/typedapi/types/trimprocessor.go b/typedapi/types/trimprocessor.go index ec551f45d8..f9afb8ac09 100644 --- a/typedapi/types/trimprocessor.go +++ b/typedapi/types/trimprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TrimProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L1120-L1136 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L1124-L1140 type TrimProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -100,7 +100,7 @@ func (s *TrimProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *TrimProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/trimtokenfilter.go b/typedapi/types/trimtokenfilter.go index ba47473025..6b696cd6ed 100644 --- a/typedapi/types/trimtokenfilter.go +++ b/typedapi/types/trimtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // TrimTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L326-L328 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L328-L330 type TrimTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` diff --git a/typedapi/types/truncatetokenfilter.go b/typedapi/types/truncatetokenfilter.go index 2e240c5653..c4cef8163d 100644 --- a/typedapi/types/truncatetokenfilter.go +++ b/typedapi/types/truncatetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TruncateTokenFilter type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L330-L333 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L332-L335 type TruncateTokenFilter struct { Length *int `json:"length,omitempty"` Type string `json:"type,omitempty"` @@ -55,7 +55,7 @@ func (s *TruncateTokenFilter) UnmarshalJSON(data []byte) error { case "length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/ttestaggregate.go b/typedapi/types/ttestaggregate.go index 0c334e0f5b..39cad50c2a 100644 --- a/typedapi/types/ttestaggregate.go +++ b/typedapi/types/ttestaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,10 +31,10 @@ import ( // TTestAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L735-L739 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L742-L746 type TTestAggregate struct { Meta Metadata `json:"meta,omitempty"` - Value Float64 `json:"value,omitempty"` + Value *Float64 `json:"value,omitempty"` ValueAsString *string `json:"value_as_string,omitempty"` } diff --git a/typedapi/types/ttestaggregation.go b/typedapi/types/ttestaggregation.go index f7fd50a78c..0bde06ba27 100644 --- a/typedapi/types/ttestaggregation.go +++ b/typedapi/types/ttestaggregation.go @@ -16,87 +16,26 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "io" - "strconv" - "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/ttesttype" ) // TTestAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L294-L308 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L294-L308 type TTestAggregation struct { // A Test population A. A *TestPopulation `json:"a,omitempty"` // B Test population B. - B *TestPopulation `json:"b,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + B *TestPopulation `json:"b,omitempty"` // Type The type of test. 
Type *ttesttype.TTestType `json:"type,omitempty"` } -func (s *TTestAggregation) UnmarshalJSON(data []byte) error { - - dec := json.NewDecoder(bytes.NewReader(data)) - - for { - t, err := dec.Token() - if err != nil { - if errors.Is(err, io.EOF) { - break - } - return err - } - - switch t { - - case "a": - if err := dec.Decode(&s.A); err != nil { - return fmt.Errorf("%s | %w", "A", err) - } - - case "b": - if err := dec.Decode(&s.B); err != nil { - return fmt.Errorf("%s | %w", "B", err) - } - - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - - case "type": - if err := dec.Decode(&s.Type); err != nil { - return fmt.Errorf("%s | %w", "Type", err) - } - - } - } - return nil -} - // NewTTestAggregation returns a TTestAggregation. func NewTTestAggregation() *TTestAggregation { r := &TTestAggregation{} diff --git a/typedapi/types/typefieldmappings.go b/typedapi/types/typefieldmappings.go index 64329de859..331fa0318a 100644 --- a/typedapi/types/typefieldmappings.go +++ b/typedapi/types/typefieldmappings.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // TypeFieldMappings type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/get_field_mapping/types.ts#L24-L26 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/get_field_mapping/types.ts#L24-L26 type TypeFieldMappings struct { Mappings map[string]FieldMapping `json:"mappings"` } diff --git a/typedapi/types/typemapping.go b/typedapi/types/typemapping.go index f7d9d58322..5da1abc062 100644 --- a/typedapi/types/typemapping.go +++ b/typedapi/types/typemapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // TypeMapping type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/TypeMapping.ts#L34-L57 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/TypeMapping.ts#L34-L57 type TypeMapping struct { AllField *AllField `json:"all_field,omitempty"` DataStreamTimestamp_ *DataStreamTimestamp `json:"_data_stream_timestamp,omitempty"` @@ -80,7 +80,7 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { } case "date_detection": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -109,7 +109,7 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { } case "numeric_detection": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -158,7 +158,7 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -179,7 +179,7 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -269,12 +269,6 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -293,6 +287,18 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -449,6 +455,12 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -482,7 +494,7 @@ func (s *TypeMapping) UnmarshalJSON(data []byte) error { } case "subobjects": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/typequery.go b/typedapi/types/typequery.go index 8923d93676..ea3771d9be 100644 --- a/typedapi/types/typequery.go +++ b/typedapi/types/typequery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // TypeQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L264-L266 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L269-L271 type TypeQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -59,7 +59,7 @@ func (s *TypeQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/uaxemailurltokenizer.go b/typedapi/types/uaxemailurltokenizer.go index f10c44c1ba..059189ebf5 100644 --- a/typedapi/types/uaxemailurltokenizer.go +++ b/typedapi/types/uaxemailurltokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // UaxEmailUrlTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L110-L113 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L110-L113 type UaxEmailUrlTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Type string `json:"type,omitempty"` @@ -55,7 +55,7 @@ func (s *UaxEmailUrlTokenizer) UnmarshalJSON(data []byte) error { case "max_token_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/unassignedinformation.go b/typedapi/types/unassignedinformation.go index 89ddd304b2..ca93076521 100644 --- a/typedapi/types/unassignedinformation.go +++ b/typedapi/types/unassignedinformation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // UnassignedInformation type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/cluster/allocation_explain/types.ts#L117-L125 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/cluster/allocation_explain/types.ts#L117-L125 type UnassignedInformation struct { AllocationStatus *string `json:"allocation_status,omitempty"` At DateTime `json:"at"` @@ -77,7 +77,7 @@ func (s *UnassignedInformation) UnmarshalJSON(data []byte) error { } case "delayed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *UnassignedInformation) UnmarshalJSON(data []byte) error { case "failed_allocation_attempts": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/uniquetokenfilter.go b/typedapi/types/uniquetokenfilter.go index ad54c696e0..1a00d58471 100644 --- a/typedapi/types/uniquetokenfilter.go +++ b/typedapi/types/uniquetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // UniqueTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L335-L338 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L337-L340 type UniqueTokenFilter struct { OnlyOnSamePosition *bool `json:"only_on_same_position,omitempty"` Type string `json:"type,omitempty"` @@ -54,7 +54,7 @@ func (s *UniqueTokenFilter) UnmarshalJSON(data []byte) error { switch t { case "only_on_same_position": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/unmappedraretermsaggregate.go b/typedapi/types/unmappedraretermsaggregate.go index 2588aa0ef8..5c056bf8a2 100644 --- a/typedapi/types/unmappedraretermsaggregate.go +++ b/typedapi/types/unmappedraretermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // UnmappedRareTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L453-L459 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L455-L461 type UnmappedRareTermsAggregate struct { Buckets BucketsVoid `json:"buckets"` Meta Metadata `json:"meta,omitempty"` @@ -59,13 +59,13 @@ func (s *UnmappedRareTermsAggregate) UnmarshalJSON(data []byte) error { localDec := json.NewDecoder(source) switch rawMsg[0] { case '{': - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': - o := []interface{}{} + o := []any{} if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Buckets", err) } diff --git a/typedapi/types/unmappedsampleraggregate.go b/typedapi/types/unmappedsampleraggregate.go index babdd6ce4f..4df77eecfa 100644 --- a/typedapi/types/unmappedsampleraggregate.go +++ b/typedapi/types/unmappedsampleraggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // UnmappedSamplerAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L501-L502 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L505-L506 type UnmappedSamplerAggregate struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -55,7 +55,7 @@ func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +519,7 @@ func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -569,7 +569,7 @@ func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -579,7 +579,7 @@ func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -596,7 +596,7 @@ func (s *UnmappedSamplerAggregate) UnmarshalJSON(data []byte) error { func (s UnmappedSamplerAggregate) MarshalJSON() ([]byte, error) { type opt UnmappedSamplerAggregate // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/unmappedsignificanttermsaggregate.go 
b/typedapi/types/unmappedsignificanttermsaggregate.go index 06e0ed2220..5bcb9dde3d 100644 --- a/typedapi/types/unmappedsignificanttermsaggregate.go +++ b/typedapi/types/unmappedsignificanttermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // UnmappedSignificantTermsAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L610-L616 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L614-L620 type UnmappedSignificantTermsAggregate struct { BgCount *int64 `json:"bg_count,omitempty"` Buckets BucketsVoid `json:"buckets"` @@ -55,7 +55,7 @@ func (s *UnmappedSignificantTermsAggregate) UnmarshalJSON(data []byte) error { switch t { case "bg_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -77,13 +77,13 @@ func (s *UnmappedSignificantTermsAggregate) UnmarshalJSON(data []byte) error { localDec := json.NewDecoder(source) switch rawMsg[0] { case '{': - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': - o := []interface{}{} + o := []any{} if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Buckets", err) } @@ -91,7 +91,7 @@ func (s *UnmappedSignificantTermsAggregate) UnmarshalJSON(data []byte) error { } case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/unmappedtermsaggregate.go b/typedapi/types/unmappedtermsaggregate.go index 8b9c03e26e..dd85dfa584 100644 --- a/typedapi/types/unmappedtermsaggregate.go +++ b/typedapi/types/unmappedtermsaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // UnmappedTermsAggregate type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L423-L429 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L425-L431 type UnmappedTermsAggregate struct { Buckets BucketsVoid `json:"buckets"` DocCountErrorUpperBound *int64 `json:"doc_count_error_upper_bound,omitempty"` @@ -62,13 +62,13 @@ func (s *UnmappedTermsAggregate) UnmarshalJSON(data []byte) error { localDec := json.NewDecoder(source) switch rawMsg[0] { case '{': - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Buckets", err) } s.Buckets = o case '[': - o := []interface{}{} + o := []any{} if err := localDec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Buckets", err) } @@ -76,7 +76,7 @@ func (s *UnmappedTermsAggregate) UnmarshalJSON(data []byte) error { } case "doc_count_error_upper_bound": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *UnmappedTermsAggregate) UnmarshalJSON(data []byte) error { } case "sum_other_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/unrateddocument.go b/typedapi/types/unrateddocument.go index 3b9156b524..b86c569987 100644 --- a/typedapi/types/unrateddocument.go +++ b/typedapi/types/unrateddocument.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // UnratedDocument type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/rank_eval/types.ts#L147-L150 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/rank_eval/types.ts#L147-L150 type UnratedDocument struct { Id_ string `json:"_id"` Index_ string `json:"_index"` diff --git a/typedapi/types/unsignedlongnumberproperty.go b/typedapi/types/unsignedlongnumberproperty.go index d2710aa5af..5de0a652a1 100644 --- a/typedapi/types/unsignedlongnumberproperty.go +++ b/typedapi/types/unsignedlongnumberproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -35,7 +35,7 @@ import ( // UnsignedLongNumberProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L169-L172 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L177-L180 type UnsignedLongNumberProperty struct { Boost *Float64 `json:"boost,omitempty"` Coerce *bool `json:"coerce,omitempty"` @@ -79,7 +79,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { } case "coerce": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -125,7 +125,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -150,7 +150,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -171,7 +171,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -261,12 +261,6 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -285,6 +279,18 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -441,6 +447,12 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -452,7 +464,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -467,7 +479,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { } case "ignore_malformed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -481,7 +493,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { } case "index": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -519,7 +531,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) 
error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -540,7 +552,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -630,12 +642,6 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -654,6 +660,18 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -810,6 +828,12 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -868,7 +892,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -882,7 +906,7 @@ func (s *UnsignedLongNumberProperty) UnmarshalJSON(data []byte) error { } case "time_series_dimension": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/untypeddecayfunction.go b/typedapi/types/untypeddecayfunction.go new file mode 100644 index 0000000000..7a0c609e7e --- /dev/null +++ b/typedapi/types/untypeddecayfunction.go @@ -0,0 +1,76 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. 
+// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "encoding/json" + "fmt" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/multivaluemode" +) + +// UntypedDecayFunction type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/compound.ts#L188-L191 +type UntypedDecayFunction struct { + DecayFunctionBase map[string]DecayPlacement `json:"-"` + // MultiValueMode Determines how the distance is calculated when a field used for computing the + // decay contains multiple values. + MultiValueMode *multivaluemode.MultiValueMode `json:"multi_value_mode,omitempty"` +} + +// MarhsalJSON overrides marshalling for types with additional properties +func (s UntypedDecayFunction) MarshalJSON() ([]byte, error) { + type opt UntypedDecayFunction + // We transform the struct to a map without the embedded additional properties map + tmp := make(map[string]any, 0) + + data, err := json.Marshal(opt(s)) + if err != nil { + return nil, err + } + err = json.Unmarshal(data, &tmp) + if err != nil { + return nil, err + } + + // We inline the additional fields from the underlying map + for key, value := range s.DecayFunctionBase { + tmp[fmt.Sprintf("%s", key)] = value + } + delete(tmp, "DecayFunctionBase") + + data, err = json.Marshal(tmp) + if err != nil { + return nil, err + } + + return data, nil +} + +// NewUntypedDecayFunction returns a UntypedDecayFunction. +func NewUntypedDecayFunction() *UntypedDecayFunction { + r := &UntypedDecayFunction{ + DecayFunctionBase: make(map[string]DecayPlacement, 0), + } + + return r +} diff --git a/typedapi/types/untypeddistancefeaturequery.go b/typedapi/types/untypeddistancefeaturequery.go new file mode 100644 index 0000000000..5ca98f35c4 --- /dev/null +++ b/typedapi/types/untypeddistancefeaturequery.go @@ -0,0 +1,133 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" +) + +// UntypedDistanceFeatureQuery type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/specialized.ts#L62-L65 +type UntypedDistanceFeatureQuery struct { + // Boost Floating point number used to decrease or increase the relevance scores of + // the query. + // Boost values are relative to the default value of 1.0. + // A boost value between 0 and 1.0 decreases the relevance score. 
+ // A value greater than 1.0 increases the relevance score. + Boost *float32 `json:"boost,omitempty"` + // Field Name of the field used to calculate distances. This field must meet the + // following criteria: + // be a `date`, `date_nanos` or `geo_point` field; + // have an `index` mapping parameter value of `true`, which is the default; + // have an `doc_values` mapping parameter value of `true`, which is the default. + Field string `json:"field"` + // Origin Date or point of origin used to calculate distances. + // If the `field` value is a `date` or `date_nanos` field, the `origin` value + // must be a date. + // Date Math, such as `now-1h`, is supported. + // If the field value is a `geo_point` field, the `origin` value must be a + // geopoint. + Origin json.RawMessage `json:"origin,omitempty"` + // Pivot Distance from the `origin` at which relevance scores receive half of the + // `boost` value. + // If the `field` value is a `date` or `date_nanos` field, the `pivot` value + // must be a time unit, such as `1h` or `10d`. If the `field` value is a + // `geo_point` field, the `pivot` value must be a distance unit, such as `1km` + // or `12m`. + Pivot json.RawMessage `json:"pivot,omitempty"` + QueryName_ *string `json:"_name,omitempty"` +} + +func (s *UntypedDistanceFeatureQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "Boost", err) + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "field": + if err := dec.Decode(&s.Field); err != nil { + return fmt.Errorf("%s | %w", "Field", err) + } + + case "origin": + if err := dec.Decode(&s.Origin); err != nil { + return fmt.Errorf("%s | %w", "Origin", err) + } + + case "pivot": + if err := dec.Decode(&s.Pivot); err != nil { + return fmt.Errorf("%s | %w", "Pivot", err) + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "QueryName_", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.QueryName_ = &o + + } + } + return nil +} + +// NewUntypedDistanceFeatureQuery returns a UntypedDistanceFeatureQuery. +func NewUntypedDistanceFeatureQuery() *UntypedDistanceFeatureQuery { + r := &UntypedDistanceFeatureQuery{} + + return r +} diff --git a/typedapi/types/untypedrangequery.go b/typedapi/types/untypedrangequery.go new file mode 100644 index 0000000000..6375349bd2 --- /dev/null +++ b/typedapi/types/untypedrangequery.go @@ -0,0 +1,162 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated from the elasticsearch-specification DO NOT EDIT. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +package types + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" + "io" + "strconv" + + "github.com/elastic/go-elasticsearch/v8/typedapi/types/enums/rangerelation" +) + +// UntypedRangeQuery type. +// +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L135-L144 +type UntypedRangeQuery struct { + // Boost Floating point number used to decrease or increase the relevance scores of + // the query. + // Boost values are relative to the default value of 1.0. + // A boost value between 0 and 1.0 decreases the relevance score. + // A value greater than 1.0 increases the relevance score. + Boost *float32 `json:"boost,omitempty"` + // Format Date format used to convert `date` values in the query. + Format *string `json:"format,omitempty"` + From *json.RawMessage `json:"from,omitempty"` + // Gt Greater than. + Gt json.RawMessage `json:"gt,omitempty"` + // Gte Greater than or equal to. + Gte json.RawMessage `json:"gte,omitempty"` + // Lt Less than. + Lt json.RawMessage `json:"lt,omitempty"` + // Lte Less than or equal to. + Lte json.RawMessage `json:"lte,omitempty"` + QueryName_ *string `json:"_name,omitempty"` + // Relation Indicates how the range query matches values for `range` fields. + Relation *rangerelation.RangeRelation `json:"relation,omitempty"` + // TimeZone Coordinated Universal Time (UTC) offset or IANA time zone used to convert + // `date` values in the query to UTC. 
+ TimeZone *string `json:"time_zone,omitempty"` + To *json.RawMessage `json:"to,omitempty"` +} + +func (s *UntypedRangeQuery) UnmarshalJSON(data []byte) error { + + dec := json.NewDecoder(bytes.NewReader(data)) + + for { + t, err := dec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return err + } + + switch t { + + case "boost": + var tmp any + dec.Decode(&tmp) + switch v := tmp.(type) { + case string: + value, err := strconv.ParseFloat(v, 32) + if err != nil { + return fmt.Errorf("%s | %w", "Boost", err) + } + f := float32(value) + s.Boost = &f + case float64: + f := float32(v) + s.Boost = &f + } + + case "format": + if err := dec.Decode(&s.Format); err != nil { + return fmt.Errorf("%s | %w", "Format", err) + } + + case "from": + if err := dec.Decode(&s.From); err != nil { + return fmt.Errorf("%s | %w", "From", err) + } + + case "gt": + if err := dec.Decode(&s.Gt); err != nil { + return fmt.Errorf("%s | %w", "Gt", err) + } + + case "gte": + if err := dec.Decode(&s.Gte); err != nil { + return fmt.Errorf("%s | %w", "Gte", err) + } + + case "lt": + if err := dec.Decode(&s.Lt); err != nil { + return fmt.Errorf("%s | %w", "Lt", err) + } + + case "lte": + if err := dec.Decode(&s.Lte); err != nil { + return fmt.Errorf("%s | %w", "Lte", err) + } + + case "_name": + var tmp json.RawMessage + if err := dec.Decode(&tmp); err != nil { + return fmt.Errorf("%s | %w", "QueryName_", err) + } + o := string(tmp[:]) + o, err = strconv.Unquote(o) + if err != nil { + o = string(tmp[:]) + } + s.QueryName_ = &o + + case "relation": + if err := dec.Decode(&s.Relation); err != nil { + return fmt.Errorf("%s | %w", "Relation", err) + } + + case "time_zone": + if err := dec.Decode(&s.TimeZone); err != nil { + return fmt.Errorf("%s | %w", "TimeZone", err) + } + + case "to": + if err := dec.Decode(&s.To); err != nil { + return fmt.Errorf("%s | %w", "To", err) + } + + } + } + return nil +} + +// NewUntypedRangeQuery returns a UntypedRangeQuery. +func NewUntypedRangeQuery() *UntypedRangeQuery { + r := &UntypedRangeQuery{} + + return r +} diff --git a/typedapi/types/updateaction.go b/typedapi/types/updateaction.go index 8d90a00bbd..b0466731b8 100644 --- a/typedapi/types/updateaction.go +++ b/typedapi/types/updateaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // UpdateAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/bulk/types.ts#L169-L205 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/bulk/types.ts#L169-L205 type UpdateAction struct { // DetectNoop Set to false to disable setting 'result' in the response // to 'noop' if no change to the document occurred. 
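For orientation, a short hand-written sketch (not part of the generated diff) of populating the new UntypedRangeQuery and serializing it; since `gt`/`gte`/`lt`/`lte` are json.RawMessage, they can carry dates, numbers, or strings. The date values and format string below are illustrative assumptions:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	// Build the untyped range query added above; raw JSON values let the
	// caller pass whatever literal the target field expects.
	q := types.NewUntypedRangeQuery()
	q.Gte = json.RawMessage(`"2024-01-01"`)
	q.Lte = json.RawMessage(`"now/d"`)
	format := "strict_date_optional_time||yyyy-MM-dd"
	q.Format = &format

	body, err := json.Marshal(q)
	if err != nil {
		panic(err)
	}
	// prints: {"format":"strict_date_optional_time||yyyy-MM-dd","gte":"2024-01-01","lte":"now/d"}
	fmt.Println(string(body))
}
```
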
@@ -70,7 +70,7 @@ func (s *UpdateAction) UnmarshalJSON(data []byte) error { switch t { case "detect_noop": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -89,7 +89,7 @@ func (s *UpdateAction) UnmarshalJSON(data []byte) error { } case "doc_as_upsert": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -139,7 +139,7 @@ func (s *UpdateAction) UnmarshalJSON(data []byte) error { } case "scripted_upsert": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/updatebyqueryrethrottlenode.go b/typedapi/types/updatebyqueryrethrottlenode.go index aaed4fc8f1..3bca67557f 100644 --- a/typedapi/types/updatebyqueryrethrottlenode.go +++ b/typedapi/types/updatebyqueryrethrottlenode.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // UpdateByQueryRethrottleNode type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/update_by_query_rethrottle/UpdateByQueryRethrottleNode.ts#L25-L27 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/update_by_query_rethrottle/UpdateByQueryRethrottleNode.ts#L25-L27 type UpdateByQueryRethrottleNode struct { Attributes map[string]string `json:"attributes"` Host string `json:"host"` diff --git a/typedapi/types/updateoperation.go b/typedapi/types/updateoperation.go index 088ef7b34e..69e7805f98 100644 --- a/typedapi/types/updateoperation.go +++ b/typedapi/types/updateoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // UpdateOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/bulk/types.ts#L136-L143 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/bulk/types.ts#L136-L143 type UpdateOperation struct { // Id_ The document ID. Id_ *string `json:"_id,omitempty"` @@ -71,7 +71,7 @@ func (s *UpdateOperation) UnmarshalJSON(data []byte) error { } case "if_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *UpdateOperation) UnmarshalJSON(data []byte) error { } case "require_alias": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -111,7 +111,7 @@ func (s *UpdateOperation) UnmarshalJSON(data []byte) error { case "retry_on_conflict": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/uppercaseprocessor.go b/typedapi/types/uppercaseprocessor.go index 04d2211a3a..b38da9b08a 100644 --- a/typedapi/types/uppercaseprocessor.go +++ b/typedapi/types/uppercaseprocessor.go @@ -16,7 +16,7 @@ // under the License. 
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // UppercaseProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L1138-L1154 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L1142-L1158 type UppercaseProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -100,7 +100,7 @@ func (s *UppercaseProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *UppercaseProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/uppercasetokenfilter.go b/typedapi/types/uppercasetokenfilter.go index 86959d5825..63c73bba11 100644 --- a/typedapi/types/uppercasetokenfilter.go +++ b/typedapi/types/uppercasetokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // UppercaseTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L340-L342 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L342-L344 type UppercaseTokenFilter struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` diff --git a/typedapi/types/urldecodeprocessor.go b/typedapi/types/urldecodeprocessor.go index d4a4fe872c..d0c260de4a 100644 --- a/typedapi/types/urldecodeprocessor.go +++ b/typedapi/types/urldecodeprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // UrlDecodeProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L1156-L1172 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L1160-L1176 type UrlDecodeProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. 
@@ -100,7 +100,7 @@ func (s *UrlDecodeProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -114,7 +114,7 @@ func (s *UrlDecodeProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/usagestatsindex.go b/typedapi/types/usagestatsindex.go index f5cffcc365..4a34df0f4c 100644 --- a/typedapi/types/usagestatsindex.go +++ b/typedapi/types/usagestatsindex.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // UsageStatsIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L38-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L41-L43 type UsageStatsIndex struct { Shards []UsageStatsShards `json:"shards"` } diff --git a/typedapi/types/usagestatsshards.go b/typedapi/types/usagestatsshards.go index b12541c866..e32f06d810 100644 --- a/typedapi/types/usagestatsshards.go +++ b/typedapi/types/usagestatsshards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // UsageStatsShards type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L42-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/field_usage_stats/IndicesFieldUsageStatsResponse.ts#L45-L50 type UsageStatsShards struct { Routing ShardRouting `json:"routing"` Stats IndicesShardsStats `json:"stats"` diff --git a/typedapi/types/user.go b/typedapi/types/user.go index 128b2a88c3..8f1fdfbe75 100644 --- a/typedapi/types/user.go +++ b/typedapi/types/user.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,11 +31,11 @@ import ( // User type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/User.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/User.ts#L23-L31 type User struct { - Email string `json:"email,omitempty"` + Email *string `json:"email,omitempty"` Enabled bool `json:"enabled"` - FullName string `json:"full_name,omitempty"` + FullName *string `json:"full_name,omitempty"` Metadata Metadata `json:"metadata"` ProfileUid *string `json:"profile_uid,omitempty"` Roles []string `json:"roles"` @@ -67,10 +67,10 @@ func (s *User) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Email = o + s.Email = &o case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/useragentprocessor.go b/typedapi/types/useragentprocessor.go index a6e87ad67c..1ec26b05a7 100644 --- a/typedapi/types/useragentprocessor.go +++ b/typedapi/types/useragentprocessor.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // UserAgentProcessor type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ingest/_types/Processors.ts#L370-L390 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ingest/_types/Processors.ts#L370-L390 type UserAgentProcessor struct { // Description Description of the processor. // Useful for describing the purpose of the processor or its configuration. @@ -108,7 +108,7 @@ func (s *UserAgentProcessor) UnmarshalJSON(data []byte) error { s.If = &o case "ignore_failure": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -122,7 +122,7 @@ func (s *UserAgentProcessor) UnmarshalJSON(data []byte) error { } case "ignore_missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/userindicesprivileges.go b/typedapi/types/userindicesprivileges.go index a24c806a41..8498f1390c 100644 --- a/typedapi/types/userindicesprivileges.go +++ b/typedapi/types/userindicesprivileges.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // UserIndicesPrivileges type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/Privileges.ts#L107-L129 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/Privileges.ts#L226-L248 type UserIndicesPrivileges struct { // AllowRestrictedIndices Set to `true` if using wildcard or regular expressions for patterns that // cover restricted indices. 
Implicitly, restricted indices have limited @@ -72,7 +72,7 @@ func (s *UserIndicesPrivileges) UnmarshalJSON(data []byte) error { switch t { case "allow_restricted_indices": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/userprofile.go b/typedapi/types/userprofile.go index c03fb72b8c..54138f0c3e 100644 --- a/typedapi/types/userprofile.go +++ b/typedapi/types/userprofile.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // UserProfile type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/UserProfile.ts#L42-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/UserProfile.ts#L42-L48 type UserProfile struct { Data map[string]json.RawMessage `json:"data"` Enabled *bool `json:"enabled,omitempty"` @@ -64,7 +64,7 @@ func (s *UserProfile) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/userprofilehitmetadata.go b/typedapi/types/userprofilehitmetadata.go index ed247ab2e4..fc6bce8ac3 100644 --- a/typedapi/types/userprofilehitmetadata.go +++ b/typedapi/types/userprofilehitmetadata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // UserProfileHitMetadata type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/UserProfile.ts#L28-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/UserProfile.ts#L28-L31 type UserProfileHitMetadata struct { PrimaryTerm_ int64 `json:"_primary_term"` SeqNo_ int64 `json:"_seq_no"` @@ -53,7 +53,7 @@ func (s *UserProfileHitMetadata) UnmarshalJSON(data []byte) error { switch t { case "_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/userprofileuser.go b/typedapi/types/userprofileuser.go index aaaf1a221b..16337f5d78 100644 --- a/typedapi/types/userprofileuser.go +++ b/typedapi/types/userprofileuser.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,10 +31,10 @@ import ( // UserProfileUser type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/UserProfile.ts#L33-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/UserProfile.ts#L33-L40 type UserProfileUser struct { - Email string `json:"email,omitempty"` - FullName string `json:"full_name,omitempty"` + Email *string `json:"email,omitempty"` + FullName *string `json:"full_name,omitempty"` RealmDomain *string `json:"realm_domain,omitempty"` RealmName string `json:"realm_name"` Roles []string `json:"roles"` @@ -66,7 +66,7 @@ func (s *UserProfileUser) UnmarshalJSON(data []byte) error { if err != nil { o = string(tmp[:]) } - s.Email = o + s.Email = &o case "full_name": if err := dec.Decode(&s.FullName); err != nil { diff --git a/typedapi/types/userprofilewithmetadata.go b/typedapi/types/userprofilewithmetadata.go index ce79a9726f..96e578e541 100644 --- a/typedapi/types/userprofilewithmetadata.go +++ b/typedapi/types/userprofilewithmetadata.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // UserProfileWithMetadata type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/_types/UserProfile.ts#L50-L53 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/_types/UserProfile.ts#L50-L53 type UserProfileWithMetadata struct { Data map[string]json.RawMessage `json:"data"` Doc_ UserProfileHitMetadata `json:"_doc"` @@ -71,7 +71,7 @@ func (s *UserProfileWithMetadata) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -93,7 +93,7 @@ func (s *UserProfileWithMetadata) UnmarshalJSON(data []byte) error { } case "last_synchronized": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/userrealm.go b/typedapi/types/userrealm.go index 567ce09fc6..602d2c5ce3 100644 --- a/typedapi/types/userrealm.go +++ b/typedapi/types/userrealm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // UserRealm type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/security/get_token/types.ts#L30-L33 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/security/get_token/types.ts#L30-L33 type UserRealm struct { Name string `json:"name"` Type string `json:"type"` diff --git a/typedapi/types/validationloss.go b/typedapi/types/validationloss.go index 895f7d6414..cad97316a1 100644 --- a/typedapi/types/validationloss.go +++ b/typedapi/types/validationloss.go @@ -16,7 +16,7 @@ // under the License. 
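Because `Email` and `FullName` on both `User` and `UserProfileUser` are now optional `*string` fields, callers that previously read them as plain strings need a nil check. A minimal, hand-written sketch (the helper name and fallback text are illustrative, not from this PR):

```go
package main

import (
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

// displayEmail returns the user's email, or a placeholder when the server
// omitted the field (it is now *string rather than string).
func displayEmail(u types.User) string {
	if u.Email != nil {
		return *u.Email
	}
	return "<no email on record>"
}

func main() {
	var u types.User // zero value: Email is nil
	fmt.Println(displayEmail(u))

	email := "kimchy@example.com"
	u.Email = &email
	fmt.Println(displayEmail(u))
}
```
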
// Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ValidationLoss type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/DataframeAnalytics.ts#L570-L575 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/DataframeAnalytics.ts#L570-L575 type ValidationLoss struct { // FoldValues Validation loss values for every added decision tree during the forest // growing procedure. diff --git a/typedapi/types/valuecountaggregate.go b/typedapi/types/valuecountaggregate.go index 6809c81269..b09025adb4 100644 --- a/typedapi/types/valuecountaggregate.go +++ b/typedapi/types/valuecountaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // ValueCountAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L218-L222 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L218-L222 type ValueCountAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. - Value Float64 `json:"value,omitempty"` - ValueAsString *string `json:"value_as_string,omitempty"` + Value *Float64 `json:"value,omitempty"` + ValueAsString *string `json:"value_as_string,omitempty"` } func (s *ValueCountAggregate) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/valuecountaggregation.go b/typedapi/types/valuecountaggregation.go index d69b670f01..8b949aef49 100644 --- a/typedapi/types/valuecountaggregation.go +++ b/typedapi/types/valuecountaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ValueCountAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L417-L417 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L422-L422 type ValueCountAggregation struct { // Field The field on which to run the aggregation. 
Field *string `json:"field,omitempty"` diff --git a/typedapi/types/variablewidthhistogramaggregate.go b/typedapi/types/variablewidthhistogramaggregate.go index 8bc073a2fc..8af7f9fc8c 100644 --- a/typedapi/types/variablewidthhistogramaggregate.go +++ b/typedapi/types/variablewidthhistogramaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // VariableWidthHistogramAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L362-L364 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L364-L366 type VariableWidthHistogramAggregate struct { Buckets BucketsVariableWidthHistogramBucket `json:"buckets"` Meta Metadata `json:"meta,omitempty"` diff --git a/typedapi/types/variablewidthhistogramaggregation.go b/typedapi/types/variablewidthhistogramaggregation.go index 6c28c7c393..2d36eefc25 100644 --- a/typedapi/types/variablewidthhistogramaggregation.go +++ b/typedapi/types/variablewidthhistogramaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // VariableWidthHistogramAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/bucket.ts#L1015-L1035 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/bucket.ts#L1022-L1043 type VariableWidthHistogramAggregation struct { // Buckets The target number of buckets. Buckets *int `json:"buckets,omitempty"` @@ -40,7 +40,8 @@ type VariableWidthHistogramAggregation struct { // InitialBuffer Specifies the number of individual documents that will be stored in memory on // a shard before the initial bucketing algorithm is run. // Defaults to `min(10 * shard_size, 50000)`. - InitialBuffer *int `json:"initial_buffer,omitempty"` + InitialBuffer *int `json:"initial_buffer,omitempty"` + Script Script `json:"script,omitempty"` // ShardSize The number of buckets that the coordinating node will request from each // shard. // Defaults to `buckets * 50`. 
@@ -64,7 +65,7 @@ func (s *VariableWidthHistogramAggregation) UnmarshalJSON(data []byte) error { case "buckets": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -85,7 +86,7 @@ func (s *VariableWidthHistogramAggregation) UnmarshalJSON(data []byte) error { case "initial_buffer": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -99,9 +100,45 @@ func (s *VariableWidthHistogramAggregation) UnmarshalJSON(data []byte) error { s.InitialBuffer = &f } + case "script": + message := json.RawMessage{} + if err := dec.Decode(&message); err != nil { + return fmt.Errorf("%s | %w", "Script", err) + } + keyDec := json.NewDecoder(bytes.NewReader(message)) + for { + t, err := keyDec.Token() + if err != nil { + if errors.Is(err, io.EOF) { + break + } + return fmt.Errorf("%s | %w", "Script", err) + } + + switch t { + + case "lang", "options", "source": + o := NewInlineScript() + localDec := json.NewDecoder(bytes.NewReader(message)) + if err := localDec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Script", err) + } + s.Script = o + + case "id": + o := NewStoredScriptId() + localDec := json.NewDecoder(bytes.NewReader(message)) + if err := localDec.Decode(&o); err != nil { + return fmt.Errorf("%s | %w", "Script", err) + } + s.Script = o + + } + } + case "shard_size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/variablewidthhistogrambucket.go b/typedapi/types/variablewidthhistogrambucket.go index 31d1de5949..f064b89049 100644 --- a/typedapi/types/variablewidthhistogrambucket.go +++ b/typedapi/types/variablewidthhistogrambucket.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // VariableWidthHistogramBucket type. 
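The new `script` field above reuses the union-style decoding (inline script vs. stored script id), keyed on which properties appear in the JSON object. A hand-written illustration of unmarshalling such a body, assuming go-elasticsearch v8.15+; the payload itself is invented for the example:

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/elastic/go-elasticsearch/v8/typedapi/types"
)

func main() {
	raw := []byte(`{"buckets": 5, "script": {"lang": "painless", "source": "doc['price'].value"}}`)

	var agg types.VariableWidthHistogramAggregation
	if err := json.Unmarshal(raw, &agg); err != nil {
		panic(err)
	}

	// The generated UnmarshalJSON picks a concrete script type based on the
	// keys present in the "script" object ("source"/"lang"/"options" vs "id").
	switch agg.Script.(type) {
	case *types.InlineScript:
		fmt.Println("script decoded as an inline (source) script")
	case *types.StoredScriptId:
		fmt.Println("script decoded as a stored script reference")
	default:
		fmt.Printf("unexpected script representation: %T\n", agg.Script)
	}
}
```
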
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L366-L373 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L368-L375 type VariableWidthHistogramBucket struct { Aggregations map[string]Aggregate `json:"-"` DocCount int64 `json:"doc_count"` @@ -60,7 +60,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { switch t { case "doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { } case "key": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -103,7 +103,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { s.KeyAsString = &o case "max": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -131,7 +131,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { s.MaxAsString = &o case "min": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -603,7 +603,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { } s.Aggregations[elems[1]] = o - case "box_plot": + case "boxplot": o := NewBoxPlotAggregate() if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) @@ -653,7 +653,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { s.Aggregations[elems[1]] = o default: - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -663,7 +663,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { return errors.New("cannot decode JSON for field Aggregations") } } else { - o := make(map[string]interface{}, 0) + o := make(map[string]any, 0) if err := dec.Decode(&o); err != nil { return fmt.Errorf("%s | %w", "Aggregations", err) } @@ -680,7 +680,7 @@ func (s *VariableWidthHistogramBucket) UnmarshalJSON(data []byte) error { func (s VariableWidthHistogramBucket) MarshalJSON() ([]byte, error) { type opt VariableWidthHistogramBucket // We transform the struct to a map without the embedded additional properties map - tmp := make(map[string]interface{}, 0) + tmp := make(map[string]any, 0) data, err := json.Marshal(opt(s)) if err != nil { diff --git a/typedapi/types/vector.go b/typedapi/types/vector.go index 940a77867c..e2db6812ce 100644 --- a/typedapi/types/vector.go +++ b/typedapi/types/vector.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Vector type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L454-L458 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L454-L458 type Vector struct { Available bool `json:"available"` DenseVectorDimsAvgCount int `json:"dense_vector_dims_avg_count"` @@ -56,7 +56,7 @@ func (s *Vector) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -71,7 +71,7 @@ func (s *Vector) UnmarshalJSON(data []byte) error { case "dense_vector_dims_avg_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *Vector) UnmarshalJSON(data []byte) error { case "dense_vector_fields_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -102,7 +102,7 @@ func (s *Vector) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -117,7 +117,7 @@ func (s *Vector) UnmarshalJSON(data []byte) error { case "sparse_vector_fields_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/verifyindex.go b/typedapi/types/verifyindex.go index 8ea418b156..915cce04b2 100644 --- a/typedapi/types/verifyindex.go +++ b/typedapi/types/verifyindex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // VerifyIndex type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/indices/recovery/types.ts#L111-L116 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/indices/recovery/types.ts#L111-L116 type VerifyIndex struct { CheckIndexTime Duration `json:"check_index_time,omitempty"` CheckIndexTimeInMillis int64 `json:"check_index_time_in_millis"` diff --git a/typedapi/types/versionproperty.go b/typedapi/types/versionproperty.go index 8ad32da0a9..b169ff4fc8 100644 --- a/typedapi/types/versionproperty.go +++ b/typedapi/types/versionproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // VersionProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L273-L275 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L287-L289 type VersionProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -80,7 +80,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -105,7 +105,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -126,7 +126,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -216,12 +216,6 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -240,6 +234,18 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -396,6 +402,12 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -407,7 +419,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -436,7 +448,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -457,7 +469,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -547,12 +559,6 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -571,6 +577,18 @@ func (s 
*VersionProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -727,6 +745,12 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -749,7 +773,7 @@ func (s *VersionProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/vertex.go b/typedapi/types/vertex.go index 5ed6b5b9d4..24ed1b012f 100644 --- a/typedapi/types/vertex.go +++ b/typedapi/types/vertex.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Vertex type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/graph/_types/Vertex.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/graph/_types/Vertex.ts#L23-L28 type Vertex struct { Depth int64 `json:"depth"` Field string `json:"field"` @@ -55,7 +55,7 @@ func (s *Vertex) UnmarshalJSON(data []byte) error { switch t { case "depth": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -87,7 +87,7 @@ func (s *Vertex) UnmarshalJSON(data []byte) error { s.Term = o case "weight": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/vertexdefinition.go b/typedapi/types/vertexdefinition.go index ba974a6ec6..3f967e7170 100644 --- a/typedapi/types/vertexdefinition.go +++ b/typedapi/types/vertexdefinition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // VertexDefinition type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/graph/_types/Vertex.ts#L30-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/graph/_types/Vertex.ts#L30-L59 type VertexDefinition struct { // Exclude Prevents the specified terms from being included in the results. 
Exclude []string `json:"exclude,omitempty"` @@ -82,7 +82,7 @@ func (s *VertexDefinition) UnmarshalJSON(data []byte) error { } case "min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -97,7 +97,7 @@ func (s *VertexDefinition) UnmarshalJSON(data []byte) error { } case "shard_min_doc_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -113,7 +113,7 @@ func (s *VertexDefinition) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/vertexinclude.go b/typedapi/types/vertexinclude.go index ed750a7fb4..7843824596 100644 --- a/typedapi/types/vertexinclude.go +++ b/typedapi/types/vertexinclude.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // VertexInclude type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/graph/_types/Vertex.ts#L61-L64 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/graph/_types/Vertex.ts#L61-L64 type VertexInclude struct { Boost Float64 `json:"boost"` Term string `json:"term"` @@ -53,7 +53,7 @@ func (s *VertexInclude) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/vocabulary.go b/typedapi/types/vocabulary.go index eca81e3391..b0600fe7fd 100644 --- a/typedapi/types/vocabulary.go +++ b/typedapi/types/vocabulary.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Vocabulary type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L233-L235 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L233-L235 type Vocabulary struct { Index string `json:"index"` } diff --git a/typedapi/types/waitforactiveshards.go b/typedapi/types/waitforactiveshards.go index 8b9151d59d..831a3f2756 100644 --- a/typedapi/types/waitforactiveshards.go +++ b/typedapi/types/waitforactiveshards.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -25,5 +25,5 @@ package types // int // waitforactiveshardoptions.WaitForActiveShardOptions // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/common.ts#L142-L143 -type WaitForActiveShards interface{} +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/common.ts#L142-L143 +type WaitForActiveShards any diff --git a/typedapi/types/warmerstats.go b/typedapi/types/warmerstats.go index 7a84e1d6cb..fa713e8cee 100644 --- a/typedapi/types/warmerstats.go +++ b/typedapi/types/warmerstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // WarmerStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Stats.ts#L407-L412 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Stats.ts#L407-L412 type WarmerStats struct { Current int64 `json:"current"` Total int64 `json:"total"` @@ -55,7 +55,7 @@ func (s *WarmerStats) UnmarshalJSON(data []byte) error { switch t { case "current": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *WarmerStats) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/watch.go b/typedapi/types/watch.go index 0e1093ea80..4d0b88b631 100644 --- a/typedapi/types/watch.go +++ b/typedapi/types/watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // Watch type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Watch.ts#L37-L47 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Watch.ts#L37-L47 type Watch struct { Actions map[string]WatcherAction `json:"actions"` Condition WatcherCondition `json:"condition"` diff --git a/typedapi/types/watcher.go b/typedapi/types/watcher.go index 5d6a8adce8..e105e71847 100644 --- a/typedapi/types/watcher.go +++ b/typedapi/types/watcher.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Watcher type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L460-L464 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L460-L464 type Watcher struct { Available bool `json:"available"` Count Counter `json:"count"` @@ -56,7 +56,7 @@ func (s *Watcher) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -75,7 +75,7 @@ func (s *Watcher) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/watcheraction.go b/typedapi/types/watcheraction.go index ac4fc278a2..17238a2745 100644 --- a/typedapi/types/watcheraction.go +++ b/typedapi/types/watcheraction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // WatcherAction type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Action.ts#L41-L60 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Action.ts#L41-L60 type WatcherAction struct { ActionType *actiontype.ActionType `json:"action_type,omitempty"` Condition *WatcherCondition `json:"condition,omitempty"` @@ -105,7 +105,7 @@ func (s *WatcherAction) UnmarshalJSON(data []byte) error { case "max_iterations": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/watcheractions.go b/typedapi/types/watcheractions.go index 5c0a6233c6..5850f2becb 100644 --- a/typedapi/types/watcheractions.go +++ b/typedapi/types/watcheractions.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // WatcherActions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L396-L398 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L396-L398 type WatcherActions struct { Actions map[string]WatcherActionTotals `json:"actions"` } diff --git a/typedapi/types/watcheractiontotals.go b/typedapi/types/watcheractiontotals.go index 0f2048e746..de1d7c5681 100644 --- a/typedapi/types/watcheractiontotals.go +++ b/typedapi/types/watcheractiontotals.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // WatcherActionTotals type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L412-L415 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L412-L415 type WatcherActionTotals struct { Total Duration `json:"total"` TotalTimeInMs int64 `json:"total_time_in_ms"` diff --git a/typedapi/types/watchercondition.go b/typedapi/types/watchercondition.go index 296f4e2c1d..38e06ef63d 100644 --- a/typedapi/types/watchercondition.go +++ b/typedapi/types/watchercondition.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // WatcherCondition type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Conditions.ts#L47-L59 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Conditions.ts#L50-L62 type WatcherCondition struct { Always *AlwaysCondition `json:"always,omitempty"` ArrayCompare map[string]ArrayCompareCondition `json:"array_compare,omitempty"` diff --git a/typedapi/types/watcherinput.go b/typedapi/types/watcherinput.go index cc1ef26e31..b4151b1341 100644 --- a/typedapi/types/watcherinput.go +++ b/typedapi/types/watcherinput.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -26,7 +26,7 @@ import ( // WatcherInput type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Input.ts#L90-L98 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Input.ts#L90-L98 type WatcherInput struct { Chain *ChainInput `json:"chain,omitempty"` Http *HttpInput `json:"http,omitempty"` diff --git a/typedapi/types/watchernodestats.go b/typedapi/types/watchernodestats.go index 22967c868c..a83d1b2a5f 100644 --- a/typedapi/types/watchernodestats.go +++ b/typedapi/types/watchernodestats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // WatcherNodeStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/stats/types.ts#L33-L40 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/stats/types.ts#L33-L40 type WatcherNodeStats struct { CurrentWatches []WatchRecordStats `json:"current_watches,omitempty"` ExecutionThreadPool ExecutionThreadPool `json:"execution_thread_pool"` @@ -79,7 +79,7 @@ func (s *WatcherNodeStats) UnmarshalJSON(data []byte) error { } case "watch_count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/watcherstatusactions.go b/typedapi/types/watcherstatusactions.go index c2e1c5f61c..2a54fe278c 100644 --- a/typedapi/types/watcherstatusactions.go +++ b/typedapi/types/watcherstatusactions.go @@ -16,11 +16,11 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // WatcherStatusActions type alias. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Action.ts#L62-L62 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Action.ts#L62-L62 type WatcherStatusActions map[string]ActionStatus diff --git a/typedapi/types/watcherwatch.go b/typedapi/types/watcherwatch.go index d3aa49d778..c4b8b742e3 100644 --- a/typedapi/types/watcherwatch.go +++ b/typedapi/types/watcherwatch.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // WatcherWatch type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L400-L405 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L400-L405 type WatcherWatch struct { Action map[string]Counter `json:"action,omitempty"` Condition map[string]Counter `json:"condition,omitempty"` diff --git a/typedapi/types/watcherwatchtrigger.go b/typedapi/types/watcherwatchtrigger.go index febbfdbedc..0a5daef6a8 100644 --- a/typedapi/types/watcherwatchtrigger.go +++ b/typedapi/types/watcherwatchtrigger.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // WatcherWatchTrigger type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L407-L410 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L407-L410 type WatcherWatchTrigger struct { All_ Counter `json:"_all"` Schedule *WatcherWatchTriggerSchedule `json:"schedule,omitempty"` diff --git a/typedapi/types/watcherwatchtriggerschedule.go b/typedapi/types/watcherwatchtriggerschedule.go index 4732b64da1..7728352764 100644 --- a/typedapi/types/watcherwatchtriggerschedule.go +++ b/typedapi/types/watcherwatchtriggerschedule.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // WatcherWatchTriggerSchedule type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L466-L469 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L466-L469 type WatcherWatchTriggerSchedule struct { Active int64 `json:"active"` All_ Counter `json:"_all"` @@ -55,7 +55,7 @@ func (s *WatcherWatchTriggerSchedule) UnmarshalJSON(data []byte) error { switch t { case "active": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *WatcherWatchTriggerSchedule) UnmarshalJSON(data []byte) error { } case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/watchrecord.go b/typedapi/types/watchrecord.go index ac801ac659..6589131161 100644 --- a/typedapi/types/watchrecord.go +++ b/typedapi/types/watchrecord.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // WatchRecord type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/execute_watch/types.ts#L27-L39 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/execute_watch/types.ts#L27-L39 type WatchRecord struct { Condition WatcherCondition `json:"condition"` Input WatcherInput `json:"input"` diff --git a/typedapi/types/watchrecordqueuedstats.go b/typedapi/types/watchrecordqueuedstats.go index a5acd1eb90..224f8b1d4d 100644 --- a/typedapi/types/watchrecordqueuedstats.go +++ b/typedapi/types/watchrecordqueuedstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // WatchRecordQueuedStats type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/stats/types.ts#L50-L52 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/stats/types.ts#L50-L52 type WatchRecordQueuedStats struct { ExecutionTime DateTime `json:"execution_time"` } diff --git a/typedapi/types/watchrecordstats.go b/typedapi/types/watchrecordstats.go index a463a4a259..9b9610bd7d 100644 --- a/typedapi/types/watchrecordstats.go +++ b/typedapi/types/watchrecordstats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -32,7 +32,7 @@ import ( // WatchRecordStats type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/stats/types.ts#L54-L60 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/stats/types.ts#L54-L60 type WatchRecordStats struct { ExecutedActions []string `json:"executed_actions,omitempty"` ExecutionPhase executionphase.ExecutionPhase `json:"execution_phase"` diff --git a/typedapi/types/watchstatus.go b/typedapi/types/watchstatus.go index 781aed3ad1..a871845182 100644 --- a/typedapi/types/watchstatus.go +++ b/typedapi/types/watchstatus.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // WatchStatus type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Watch.ts#L49-L56 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Watch.ts#L49-L56 type WatchStatus struct { Actions WatcherStatusActions `json:"actions"` ExecutionState *string `json:"execution_state,omitempty"` diff --git a/typedapi/types/webhookaction.go b/typedapi/types/webhookaction.go index 25dcd7ba58..b6a3c8a902 100644 --- a/typedapi/types/webhookaction.go +++ b/typedapi/types/webhookaction.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -34,7 +34,7 @@ import ( // WebhookAction type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L293-L293 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L293-L293 type WebhookAction struct { Auth *HttpInputAuthentication `json:"auth,omitempty"` Body *string `json:"body,omitempty"` diff --git a/typedapi/types/webhookresult.go b/typedapi/types/webhookresult.go index 873c4f9287..db395d72c7 100644 --- a/typedapi/types/webhookresult.go +++ b/typedapi/types/webhookresult.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // WebhookResult type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/_types/Actions.ts#L295-L298 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/_types/Actions.ts#L295-L298 type WebhookResult struct { Request HttpInputRequestResult `json:"request"` Response *HttpInputResponseResult `json:"response,omitempty"` diff --git a/typedapi/types/weightedaverageaggregation.go b/typedapi/types/weightedaverageaggregation.go index 1ffdbaf96d..bcf8005af6 100644 --- a/typedapi/types/weightedaverageaggregation.go +++ b/typedapi/types/weightedaverageaggregation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,12 +33,10 @@ import ( // WeightedAverageAggregation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L432-L446 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L437-L451 type WeightedAverageAggregation struct { // Format A numeric response formatter. - Format *string `json:"format,omitempty"` - Meta Metadata `json:"meta,omitempty"` - Name *string `json:"name,omitempty"` + Format *string `json:"format,omitempty"` // Value Configuration for the field that provides the values. 
Value *WeightedAverageValue `json:"value,omitempty"` ValueType *valuetype.ValueType `json:"value_type,omitempty"` @@ -73,23 +71,6 @@ func (s *WeightedAverageAggregation) UnmarshalJSON(data []byte) error { } s.Format = &o - case "meta": - if err := dec.Decode(&s.Meta); err != nil { - return fmt.Errorf("%s | %w", "Meta", err) - } - - case "name": - var tmp json.RawMessage - if err := dec.Decode(&tmp); err != nil { - return fmt.Errorf("%s | %w", "Name", err) - } - o := string(tmp[:]) - o, err = strconv.Unquote(o) - if err != nil { - o = string(tmp[:]) - } - s.Name = &o - case "value": if err := dec.Decode(&s.Value); err != nil { return fmt.Errorf("%s | %w", "Value", err) diff --git a/typedapi/types/weightedaveragevalue.go b/typedapi/types/weightedaveragevalue.go index b6dcb6ad5c..0c9f9041a0 100644 --- a/typedapi/types/weightedaveragevalue.go +++ b/typedapi/types/weightedaveragevalue.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // WeightedAverageValue type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/metric.ts#L448-L458 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/metric.ts#L453-L463 type WeightedAverageValue struct { // Field The field from which to extract the values or weights. Field *string `json:"field,omitempty"` @@ -61,7 +61,7 @@ func (s *WeightedAverageValue) UnmarshalJSON(data []byte) error { } case "missing": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/weightedavgaggregate.go b/typedapi/types/weightedavgaggregate.go index fd2600106a..6189431582 100644 --- a/typedapi/types/weightedavgaggregate.go +++ b/typedapi/types/weightedavgaggregate.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,14 +31,14 @@ import ( // WeightedAvgAggregate type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/aggregations/Aggregate.ts#L212-L216 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/aggregations/Aggregate.ts#L212-L216 type WeightedAvgAggregate struct { Meta Metadata `json:"meta,omitempty"` // Value The metric value. A missing value generally means that there was no data to // aggregate, // unless specified otherwise. 
- Value Float64 `json:"value,omitempty"` - ValueAsString *string `json:"value_as_string,omitempty"` + Value *Float64 `json:"value,omitempty"` + ValueAsString *string `json:"value_as_string,omitempty"` } func (s *WeightedAvgAggregate) UnmarshalJSON(data []byte) error { diff --git a/typedapi/types/weightedtokensquery.go b/typedapi/types/weightedtokensquery.go index 6174ecd2e2..c0e807e81a 100644 --- a/typedapi/types/weightedtokensquery.go +++ b/typedapi/types/weightedtokensquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // WeightedTokensQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/WeightedTokensQuery.ts#L27-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/WeightedTokensQuery.ts#L27-L32 type WeightedTokensQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -62,7 +62,7 @@ func (s *WeightedTokensQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/weights.go b/typedapi/types/weights.go index 927e271550..5d7bade43c 100644 --- a/typedapi/types/weights.go +++ b/typedapi/types/weights.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // Weights type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/put_trained_model/types.ts#L108-L110 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/put_trained_model/types.ts#L108-L110 type Weights struct { Weights Float64 `json:"weights"` } @@ -52,7 +52,7 @@ func (s *Weights) UnmarshalJSON(data []byte) error { switch t { case "weights": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/whitespaceanalyzer.go b/typedapi/types/whitespaceanalyzer.go index 5b336afcad..936e18207e 100644 --- a/typedapi/types/whitespaceanalyzer.go +++ b/typedapi/types/whitespaceanalyzer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -30,7 +30,7 @@ import ( // WhitespaceAnalyzer type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/analyzers.ts#L108-L111 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/analyzers.ts#L108-L111 type WhitespaceAnalyzer struct { Type string `json:"type,omitempty"` Version *string `json:"version,omitempty"` diff --git a/typedapi/types/whitespacetokenizer.go b/typedapi/types/whitespacetokenizer.go index aad3e82f7f..e1e66d7db4 100644 --- a/typedapi/types/whitespacetokenizer.go +++ b/typedapi/types/whitespacetokenizer.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // WhitespaceTokenizer type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/tokenizers.ts#L115-L118 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/tokenizers.ts#L115-L118 type WhitespaceTokenizer struct { MaxTokenLength *int `json:"max_token_length,omitempty"` Type string `json:"type,omitempty"` @@ -55,7 +55,7 @@ func (s *WhitespaceTokenizer) UnmarshalJSON(data []byte) error { case "max_token_length": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/wildcardproperty.go b/typedapi/types/wildcardproperty.go index e6af357a8b..b1aa6c99f0 100644 --- a/typedapi/types/wildcardproperty.go +++ b/typedapi/types/wildcardproperty.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // WildcardProperty type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/mapping/core.ts#L277-L284 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/mapping/core.ts#L291-L298 type WildcardProperty struct { CopyTo []string `json:"copy_to,omitempty"` DocValues *bool `json:"doc_values,omitempty"` @@ -81,7 +81,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { } case "doc_values": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -106,7 +106,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -127,7 +127,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -217,12 +217,6 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Fields[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -241,6 +235,18 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -397,6 +403,12 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { return err } s.Fields[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Fields[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -408,7 +420,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { case "ignore_above": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -449,7 +461,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { refs := make(map[string]json.RawMessage, 0) dec.Decode(&refs) for key, message := range refs { - kind := make(map[string]interface{}) + kind := make(map[string]any) buf := bytes.NewReader(message) localDec := json.NewDecoder(buf) localDec.Decode(&kind) @@ -470,7 +482,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "{dynamic_property}": + case "{dynamic_type}": oo := NewDynamicProperty() if err := localDec.Decode(&oo); err != nil { return err @@ -560,12 +572,6 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo - case "sparse_vector": - oo := NewSparseVectorProperty() - if err := localDec.Decode(&oo); err != nil { - return err - } - s.Properties[key] = oo case "flattened": oo := NewFlattenedProperty() if err := localDec.Decode(&oo); err != nil { @@ -584,6 +590,18 @@ 
func (s *WildcardProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "semantic_text": + oo := NewSemanticTextProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo + case "sparse_vector": + oo := NewSparseVectorProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo case "completion": oo := NewCompletionProperty() if err := localDec.Decode(&oo); err != nil { @@ -740,6 +758,12 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { return err } s.Properties[key] = oo + case "icu_collation_keyword": + oo := NewIcuCollationProperty() + if err := localDec.Decode(&oo); err != nil { + return err + } + s.Properties[key] = oo default: oo := new(Property) if err := localDec.Decode(&oo); err != nil { @@ -762,7 +786,7 @@ func (s *WildcardProperty) UnmarshalJSON(data []byte) error { s.Similarity = &o case "store": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/wildcardquery.go b/typedapi/types/wildcardquery.go index 8d59a10d68..ca48f6e46a 100644 --- a/typedapi/types/wildcardquery.go +++ b/typedapi/types/wildcardquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // WildcardQuery type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/term.ts#L268-L285 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/term.ts#L273-L290 type WildcardQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -79,7 +79,7 @@ func (s *WildcardQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -95,7 +95,7 @@ func (s *WildcardQuery) UnmarshalJSON(data []byte) error { } case "case_insensitive": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/wktgeobounds.go b/typedapi/types/wktgeobounds.go index 2e1306e21d..44aaf035a3 100644 --- a/typedapi/types/wktgeobounds.go +++ b/typedapi/types/wktgeobounds.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // WktGeoBounds type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/Geo.ts#L150-L152 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/Geo.ts#L150-L152 type WktGeoBounds struct { Wkt string `json:"wkt"` } diff --git a/typedapi/types/worddelimitergraphtokenfilter.go b/typedapi/types/worddelimitergraphtokenfilter.go index 1e84ceb4c0..6dad9f011a 100644 --- a/typedapi/types/worddelimitergraphtokenfilter.go +++ b/typedapi/types/worddelimitergraphtokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // WordDelimiterGraphTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L149-L166 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L151-L168 type WordDelimiterGraphTokenFilter struct { AdjustOffsets *bool `json:"adjust_offsets,omitempty"` CatenateAll *bool `json:"catenate_all,omitempty"` @@ -68,7 +68,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { switch t { case "adjust_offsets": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -82,7 +82,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { } case "catenate_all": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -96,7 +96,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { } case "catenate_numbers": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -110,7 +110,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { } case "catenate_words": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -124,7 +124,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { } case "generate_number_parts": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -138,7 +138,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { } case "generate_word_parts": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -152,7 +152,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { } case "ignore_keywords": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -188,7 +188,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { s.ProtectedWordsPath = &o case "split_on_case_change": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -202,7 +202,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { } case "split_on_numerics": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -216,7 +216,7 @@ func (s *WordDelimiterGraphTokenFilter) UnmarshalJSON(data []byte) error { } case "stem_english_possessive": - var 
tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/worddelimitertokenfilter.go b/typedapi/types/worddelimitertokenfilter.go index cffa570ccd..fab7b5a615 100644 --- a/typedapi/types/worddelimitertokenfilter.go +++ b/typedapi/types/worddelimitertokenfilter.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // WordDelimiterTokenFilter type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/analysis/token_filters.ts#L132-L147 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/analysis/token_filters.ts#L134-L149 type WordDelimiterTokenFilter struct { CatenateAll *bool `json:"catenate_all,omitempty"` CatenateNumbers *bool `json:"catenate_numbers,omitempty"` @@ -66,7 +66,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { switch t { case "catenate_all": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { } case "catenate_numbers": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -94,7 +94,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { } case "catenate_words": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +108,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { } case "generate_number_parts": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -122,7 +122,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { } case "generate_word_parts": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -158,7 +158,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { s.ProtectedWordsPath = &o case "split_on_case_change": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -172,7 +172,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { } case "split_on_numerics": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -186,7 +186,7 @@ func (s *WordDelimiterTokenFilter) UnmarshalJSON(data []byte) error { } case "stem_english_possessive": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/wrapperquery.go b/typedapi/types/wrapperquery.go index 4d40e8b84e..4cb689360d 100644 --- a/typedapi/types/wrapperquery.go +++ b/typedapi/types/wrapperquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // WrapperQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_types/query_dsl/abstractions.ts#L481-L487 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_types/query_dsl/abstractions.ts#L501-L507 type WrapperQuery struct { // Boost Floating point number used to decrease or increase the relevance scores of // the query. @@ -61,7 +61,7 @@ func (s *WrapperQuery) UnmarshalJSON(data []byte) error { switch t { case "boost": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/writeoperation.go b/typedapi/types/writeoperation.go index 94ec4c3225..e8a78c84d9 100644 --- a/typedapi/types/writeoperation.go +++ b/typedapi/types/writeoperation.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -33,7 +33,7 @@ import ( // WriteOperation type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/_global/bulk/types.ts#L109-L128 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/_global/bulk/types.ts#L109-L128 type WriteOperation struct { // DynamicTemplates A map from the full name of fields to the name of dynamic templates. // Defaults to an empty map. @@ -91,7 +91,7 @@ func (s *WriteOperation) UnmarshalJSON(data []byte) error { } case "if_primary_term": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -128,7 +128,7 @@ func (s *WriteOperation) UnmarshalJSON(data []byte) error { s.Pipeline = &o case "require_alias": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/xpackdatafeed.go b/typedapi/types/xpackdatafeed.go index 6825cafd90..b69f396ab0 100644 --- a/typedapi/types/xpackdatafeed.go +++ b/typedapi/types/xpackdatafeed.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // XpackDatafeed type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L77-L79 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L77-L79 type XpackDatafeed struct { Count int64 `json:"count"` } @@ -52,7 +52,7 @@ func (s *XpackDatafeed) UnmarshalJSON(data []byte) error { switch t { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/xpackfeature.go b/typedapi/types/xpackfeature.go index 5dfe05c0f6..35d1baa734 100644 --- a/typedapi/types/xpackfeature.go +++ b/typedapi/types/xpackfeature.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // XpackFeature type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/info/types.ts#L77-L82 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/info/types.ts#L77-L82 type XpackFeature struct { Available bool `json:"available"` Description *string `json:"description,omitempty"` @@ -55,7 +55,7 @@ func (s *XpackFeature) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -81,7 +81,7 @@ func (s *XpackFeature) UnmarshalJSON(data []byte) error { s.Description = &o case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/xpackfeatures.go b/typedapi/types/xpackfeatures.go index 20eb5a5208..1c70c64870 100644 --- a/typedapi/types/xpackfeatures.go +++ b/typedapi/types/xpackfeatures.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types // XpackFeatures type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/info/types.ts#L42-L75 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/info/types.ts#L42-L75 type XpackFeatures struct { AggregateMetric XpackFeature `json:"aggregate_metric"` Analytics XpackFeature `json:"analytics"` diff --git a/typedapi/types/xpackquery.go b/typedapi/types/xpackquery.go index 4a03742aaf..98c6ce448a 100644 --- a/typedapi/types/xpackquery.go +++ b/typedapi/types/xpackquery.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // XpackQuery type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L259-L264 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L259-L264 type XpackQuery struct { Count *int `json:"count,omitempty"` Failed *int `json:"failed,omitempty"` @@ -56,7 +56,7 @@ func (s *XpackQuery) UnmarshalJSON(data []byte) error { case "count": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -72,7 +72,7 @@ func (s *XpackQuery) UnmarshalJSON(data []byte) error { case "failed": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -88,7 +88,7 @@ func (s *XpackQuery) UnmarshalJSON(data []byte) error { case "paging": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -104,7 +104,7 @@ func (s *XpackQuery) UnmarshalJSON(data []byte) error { case "total": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/xpackrealm.go b/typedapi/types/xpackrealm.go index b3fd1ea9bb..527b93d8b0 100644 --- a/typedapi/types/xpackrealm.go +++ b/typedapi/types/xpackrealm.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // XpackRealm type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L417-L426 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L417-L426 type XpackRealm struct { Available bool `json:"available"` Cache []RealmCache `json:"cache,omitempty"` @@ -61,7 +61,7 @@ func (s *XpackRealm) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -80,7 +80,7 @@ func (s *XpackRealm) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/xpackrolemapping.go b/typedapi/types/xpackrolemapping.go index 43bfda58d7..1a8d703cdd 100644 --- a/typedapi/types/xpackrolemapping.go +++ b/typedapi/types/xpackrolemapping.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // XpackRoleMapping type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L270-L273 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L270-L273 type XpackRoleMapping struct { Enabled int `json:"enabled"` Size int `json:"size"` @@ -54,7 +54,7 @@ func (s *XpackRoleMapping) UnmarshalJSON(data []byte) error { case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -70,7 +70,7 @@ func (s *XpackRoleMapping) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/xpackruntimefieldtypes.go b/typedapi/types/xpackruntimefieldtypes.go index b79d583a5b..dc19fcf9b0 100644 --- a/typedapi/types/xpackruntimefieldtypes.go +++ b/typedapi/types/xpackruntimefieldtypes.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // XpackRuntimeFieldTypes type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/types.ts#L275-L277 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/types.ts#L275-L277 type XpackRuntimeFieldTypes struct { Available bool `json:"available"` Enabled bool `json:"enabled"` @@ -54,7 +54,7 @@ func (s *XpackRuntimeFieldTypes) UnmarshalJSON(data []byte) error { switch t { case "available": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -68,7 +68,7 @@ func (s *XpackRuntimeFieldTypes) UnmarshalJSON(data []byte) error { } case "enabled": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/zeroshotclassificationinferenceoptions.go b/typedapi/types/zeroshotclassificationinferenceoptions.go index c635d2a1be..ccd74fec77 100644 --- a/typedapi/types/zeroshotclassificationinferenceoptions.go +++ b/typedapi/types/zeroshotclassificationinferenceoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ZeroShotClassificationInferenceOptions type. 
// -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L201-L222 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L201-L222 type ZeroShotClassificationInferenceOptions struct { // ClassificationLabels The zero shot classification labels indicating entailment, neutral, and // contradiction @@ -88,7 +88,7 @@ func (s *ZeroShotClassificationInferenceOptions) UnmarshalJSON(data []byte) erro } case "multi_label": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/types/zeroshotclassificationinferenceupdateoptions.go b/typedapi/types/zeroshotclassificationinferenceupdateoptions.go index 448185dd88..c360fbd0fd 100644 --- a/typedapi/types/zeroshotclassificationinferenceupdateoptions.go +++ b/typedapi/types/zeroshotclassificationinferenceupdateoptions.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package types @@ -31,7 +31,7 @@ import ( // ZeroShotClassificationInferenceUpdateOptions type. // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/ml/_types/inference.ts#L374-L383 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/ml/_types/inference.ts#L374-L383 type ZeroShotClassificationInferenceUpdateOptions struct { // Labels The labels to predict. Labels []string `json:"labels"` @@ -66,7 +66,7 @@ func (s *ZeroShotClassificationInferenceUpdateOptions) UnmarshalJSON(data []byte } case "multi_label": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/watcher/ackwatch/ack_watch.go b/typedapi/watcher/ackwatch/ack_watch.go index fc69236e33..682e488875 100644 --- a/typedapi/watcher/ackwatch/ack_watch.go +++ b/typedapi/watcher/ackwatch/ack_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Acknowledges a watch, manually throttling the execution of the watch's // actions. @@ -28,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -290,7 +290,7 @@ func (r AckWatch) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -335,3 +335,47 @@ func (r *AckWatch) ActionId(actionid string) *AckWatch { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
+// API name: error_trace +func (r *AckWatch) ErrorTrace(errortrace bool) *AckWatch { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *AckWatch) FilterPath(filterpaths ...string) *AckWatch { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *AckWatch) Human(human bool) *AckWatch { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *AckWatch) Pretty(pretty bool) *AckWatch { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/watcher/ackwatch/response.go b/typedapi/watcher/ackwatch/response.go index 73e0271126..a35c9b1bad 100644 --- a/typedapi/watcher/ackwatch/response.go +++ b/typedapi/watcher/ackwatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package ackwatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package ackwatch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/ack_watch/WatcherAckWatchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/ack_watch/WatcherAckWatchResponse.ts#L22-L24 type Response struct { Status types.WatchStatus `json:"status"` } diff --git a/typedapi/watcher/activatewatch/activate_watch.go b/typedapi/watcher/activatewatch/activate_watch.go index 7a867dce4b..17293e3dde 100644 --- a/typedapi/watcher/activatewatch/activate_watch.go +++ b/typedapi/watcher/activatewatch/activate_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Activates a currently inactive watch. 
package activatewatch @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r ActivateWatch) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +300,47 @@ func (r *ActivateWatch) _watchid(watchid string) *ActivateWatch { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *ActivateWatch) ErrorTrace(errortrace bool) *ActivateWatch { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ActivateWatch) FilterPath(filterpaths ...string) *ActivateWatch { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ActivateWatch) Human(human bool) *ActivateWatch { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ActivateWatch) Pretty(pretty bool) *ActivateWatch { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/watcher/activatewatch/response.go b/typedapi/watcher/activatewatch/response.go index f4565c8b3c..b83b5c53e6 100644 --- a/typedapi/watcher/activatewatch/response.go +++ b/typedapi/watcher/activatewatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package activatewatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package activatewatch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/activate_watch/WatcherActivateWatchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/activate_watch/WatcherActivateWatchResponse.ts#L22-L24 type Response struct { Status types.ActivationStatus `json:"status"` } diff --git a/typedapi/watcher/deactivatewatch/deactivate_watch.go b/typedapi/watcher/deactivatewatch/deactivate_watch.go index d0e5e97792..2b478d90b3 100644 --- a/typedapi/watcher/deactivatewatch/deactivate_watch.go +++ b/typedapi/watcher/deactivatewatch/deactivate_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Deactivates a currently active watch. package deactivatewatch @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -264,7 +264,7 @@ func (r DeactivateWatch) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -300,3 +300,47 @@ func (r *DeactivateWatch) _watchid(watchid string) *DeactivateWatch { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeactivateWatch) ErrorTrace(errortrace bool) *DeactivateWatch { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeactivateWatch) FilterPath(filterpaths ...string) *DeactivateWatch { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeactivateWatch) Human(human bool) *DeactivateWatch { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *DeactivateWatch) Pretty(pretty bool) *DeactivateWatch { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/watcher/deactivatewatch/response.go b/typedapi/watcher/deactivatewatch/response.go index 8b8a7d7fe3..31c6a55f39 100644 --- a/typedapi/watcher/deactivatewatch/response.go +++ b/typedapi/watcher/deactivatewatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deactivatewatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package deactivatewatch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/deactivate_watch/DeactivateWatchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/deactivate_watch/DeactivateWatchResponse.ts#L22-L24 type Response struct { Status types.ActivationStatus `json:"status"` } diff --git a/typedapi/watcher/deletewatch/delete_watch.go b/typedapi/watcher/deletewatch/delete_watch.go index 7f83401298..1d079c7dc4 100644 --- a/typedapi/watcher/deletewatch/delete_watch.go +++ b/typedapi/watcher/deletewatch/delete_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Removes a watch from Watcher. package deletewatch @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -262,7 +262,7 @@ func (r DeleteWatch) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +298,47 @@ func (r *DeleteWatch) _id(id string) *DeleteWatch { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *DeleteWatch) ErrorTrace(errortrace bool) *DeleteWatch { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *DeleteWatch) FilterPath(filterpaths ...string) *DeleteWatch { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *DeleteWatch) Human(human bool) *DeleteWatch { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. 
+// API name: pretty +func (r *DeleteWatch) Pretty(pretty bool) *DeleteWatch { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/watcher/deletewatch/response.go b/typedapi/watcher/deletewatch/response.go index 2f09c4d694..5a54ba61d0 100644 --- a/typedapi/watcher/deletewatch/response.go +++ b/typedapi/watcher/deletewatch/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package deletewatch // Response holds the response body struct for the package deletewatch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/delete_watch/DeleteWatchResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/delete_watch/DeleteWatchResponse.ts#L22-L24 type Response struct { Found bool `json:"found"` Id_ string `json:"_id"` diff --git a/typedapi/watcher/executewatch/execute_watch.go b/typedapi/watcher/executewatch/execute_watch.go index b2f759c6ba..d6abb87ded 100644 --- a/typedapi/watcher/executewatch/execute_watch.go +++ b/typedapi/watcher/executewatch/execute_watch.go @@ -16,9 +16,15 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 - -// Forces the execution of a stored watch. +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed + +// This API can be used to force execution of the watch outside of its +// triggering logic or to simulate the watch execution for debugging purposes. +// For testing and debugging purposes, you also have fine-grained control on how +// the watch runs. You can execute the watch without executing all of its +// actions or alternatively by simulating them. You can also force execution by +// ignoring the watch condition and control whether a watch record would be +// written to the watch history after execution. package executewatch import ( @@ -80,7 +86,13 @@ func NewExecuteWatchFunc(tp elastictransport.Interface) NewExecuteWatch { } } -// Forces the execution of a stored watch. +// This API can be used to force execution of the watch outside of its +// triggering logic or to simulate the watch execution for debugging purposes. +// For testing and debugging purposes, you also have fine-grained control on how +// the watch runs. You can execute the watch without executing all of its +// actions or alternatively by simulating them. You can also force execution by +// ignoring the watch condition and control whether a watch record would be +// written to the watch history after execution. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-execute-watch.html func New(tp elastictransport.Interface) *ExecuteWatch { @@ -333,6 +345,50 @@ func (r *ExecuteWatch) Debug(debug bool) *ExecuteWatch { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. 
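Aside (not part of the patch): the DeleteWatch builders added above gain the common query parameters (error_trace, filter_path, human, pretty). A usage sketch, assuming a typed client created with elasticsearch.NewTypedClient and the usual Watcher namespace accessor; the address and watch id are placeholders:

package main

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

func main() {
	// Hypothetical client setup; client and namespace layout assumed.
	es, err := elasticsearch.NewTypedClient(elasticsearch.Config{
		Addresses: []string{"http://localhost:9200"},
	})
	if err != nil {
		log.Fatal(err)
	}

	// Chain the query parameters introduced in this diff onto the request.
	res, err := es.Watcher.DeleteWatch("my_watch").
		FilterPath("found", "_id").
		Pretty(true).
		ErrorTrace(true).
		Do(context.Background())
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("found=%v id=%s", res.Found, res.Id_)
}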
+// API name: error_trace +func (r *ExecuteWatch) ErrorTrace(errortrace bool) *ExecuteWatch { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *ExecuteWatch) FilterPath(filterpaths ...string) *ExecuteWatch { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *ExecuteWatch) Human(human bool) *ExecuteWatch { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *ExecuteWatch) Pretty(pretty bool) *ExecuteWatch { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // ActionModes Determines how to handle the watch actions as part of the watch execution. // API name: action_modes func (r *ExecuteWatch) ActionModes(actionmodes map[string]actionexecutionmode.ActionExecutionMode) *ExecuteWatch { diff --git a/typedapi/watcher/executewatch/request.go b/typedapi/watcher/executewatch/request.go index f642a81128..001ce41df9 100644 --- a/typedapi/watcher/executewatch/request.go +++ b/typedapi/watcher/executewatch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package executewatch @@ -30,7 +30,7 @@ import ( // Request holds the request body struct for the package executewatch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/execute_watch/WatcherExecuteWatchRequest.ts#L28-L79 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/execute_watch/WatcherExecuteWatchRequest.ts#L28-L79 type Request struct { // ActionModes Determines how to handle the watch actions as part of the watch execution. @@ -61,6 +61,7 @@ func NewRequest() *Request { ActionModes: make(map[string]actionexecutionmode.ActionExecutionMode, 0), AlternativeInput: make(map[string]json.RawMessage, 0), } + return r } diff --git a/typedapi/watcher/executewatch/response.go b/typedapi/watcher/executewatch/response.go index 296052947a..c68ec373a0 100644 --- a/typedapi/watcher/executewatch/response.go +++ b/typedapi/watcher/executewatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
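Aside (not part of the patch): the expanded ExecuteWatch documentation above describes forcing or simulating a watch run for debugging. A debugging-oriented sketch under the same client assumptions as the earlier example; the Id setter for the optional watch id is assumed to exist on the builder:

package watchexamples

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

// simulateWatch forces a watch run with extra diagnostics. es is a
// *elasticsearch.TypedClient as constructed in the earlier sketch.
func simulateWatch(ctx context.Context, es *elasticsearch.TypedClient, id string) error {
	res, err := es.Watcher.ExecuteWatch().
		Id(id).           // assumed setter for the optional watch id
		Debug(true).      // include debug metadata in the watch record
		ErrorTrace(true). // new in this diff: full stack traces on errors
		Do(ctx)
	if err != nil {
		return err
	}
	log.Printf("watch record id: %s", res.Id_)
	return nil
}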
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package executewatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package executewatch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/execute_watch/WatcherExecuteWatchResponse.ts#L23-L25 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/execute_watch/WatcherExecuteWatchResponse.ts#L23-L25 type Response struct { Id_ string `json:"_id"` WatchRecord types.WatchRecord `json:"watch_record"` diff --git a/typedapi/watcher/getsettings/get_settings.go b/typedapi/watcher/getsettings/get_settings.go index e174b89d5d..09ca4df952 100644 --- a/typedapi/watcher/getsettings/get_settings.go +++ b/typedapi/watcher/getsettings/get_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieve settings for the watcher system index package getsettings @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strings" @@ -203,7 +202,7 @@ func (r GetSettings) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err diff --git a/typedapi/watcher/getwatch/get_watch.go b/typedapi/watcher/getwatch/get_watch.go index 08cdbc7576..5f278f569b 100644 --- a/typedapi/watcher/getwatch/get_watch.go +++ b/typedapi/watcher/getwatch/get_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves a watch by its ID. package getwatch @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -262,7 +262,7 @@ func (r GetWatch) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -298,3 +298,47 @@ func (r *GetWatch) _id(id string) *GetWatch { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *GetWatch) ErrorTrace(errortrace bool) *GetWatch { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. 
+// API name: filter_path +func (r *GetWatch) FilterPath(filterpaths ...string) *GetWatch { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *GetWatch) Human(human bool) *GetWatch { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *GetWatch) Pretty(pretty bool) *GetWatch { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/watcher/getwatch/response.go b/typedapi/watcher/getwatch/response.go index c815b336b2..e032a44bbb 100644 --- a/typedapi/watcher/getwatch/response.go +++ b/typedapi/watcher/getwatch/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package getwatch @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package getwatch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/get_watch/GetWatchResponse.ts#L24-L34 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/get_watch/GetWatchResponse.ts#L24-L34 type Response struct { Found bool `json:"found"` Id_ string `json:"_id"` diff --git a/typedapi/watcher/putwatch/put_watch.go b/typedapi/watcher/putwatch/put_watch.go index 43cf224fac..6a9bffe6eb 100644 --- a/typedapi/watcher/putwatch/put_watch.go +++ b/typedapi/watcher/putwatch/put_watch.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Creates a new watch, or updates an existing one. package putwatch @@ -349,6 +349,50 @@ func (r *PutWatch) Version(versionnumber string) *PutWatch { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *PutWatch) ErrorTrace(errortrace bool) *PutWatch { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *PutWatch) FilterPath(filterpaths ...string) *PutWatch { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. 
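Aside (not part of the patch): a companion sketch for GetWatch, trimming the response with filter_path and asking for human-readable values; same client assumptions as above:

package watchexamples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

// getWatch reports whether the watch exists, requesting only the fields the
// caller needs via filter_path.
func getWatch(ctx context.Context, es *elasticsearch.TypedClient, id string) (bool, error) {
	res, err := es.Watcher.GetWatch(id).
		FilterPath("found", "_id", "status").
		Human(true).
		Do(ctx)
	if err != nil {
		return false, err
	}
	return res.Found, nil
}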
+// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *PutWatch) Human(human bool) *PutWatch { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *PutWatch) Pretty(pretty bool) *PutWatch { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // API name: actions func (r *PutWatch) Actions(actions map[string]types.WatcherAction) *PutWatch { diff --git a/typedapi/watcher/putwatch/request.go b/typedapi/watcher/putwatch/request.go index 0e62703358..d98b7ab655 100644 --- a/typedapi/watcher/putwatch/request.go +++ b/typedapi/watcher/putwatch/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putwatch @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package putwatch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/put_watch/WatcherPutWatchRequest.ts#L30-L53 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/put_watch/WatcherPutWatchRequest.ts#L30-L53 type Request struct { Actions map[string]types.WatcherAction `json:"actions,omitempty"` Condition *types.WatcherCondition `json:"condition,omitempty"` @@ -49,6 +49,7 @@ func NewRequest() *Request { r := &Request{ Actions: make(map[string]types.WatcherAction, 0), } + return r } diff --git a/typedapi/watcher/putwatch/response.go b/typedapi/watcher/putwatch/response.go index 95c833b5de..703a0628ab 100644 --- a/typedapi/watcher/putwatch/response.go +++ b/typedapi/watcher/putwatch/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package putwatch // Response holds the response body struct for the package putwatch // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/put_watch/WatcherPutWatchResponse.ts#L23-L31 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/put_watch/WatcherPutWatchResponse.ts#L23-L31 type Response struct { Created bool `json:"created"` Id_ string `json:"_id"` diff --git a/typedapi/watcher/querywatches/query_watches.go b/typedapi/watcher/querywatches/query_watches.go index f90b5bcd27..7d7899234a 100644 --- a/typedapi/watcher/querywatches/query_watches.go +++ b/typedapi/watcher/querywatches/query_watches.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves stored watches. package querywatches @@ -30,6 +30,7 @@ import ( "io" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -293,6 +294,50 @@ func (r *QueryWatches) Header(key, value string) *QueryWatches { return r } +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *QueryWatches) ErrorTrace(errortrace bool) *QueryWatches { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *QueryWatches) FilterPath(filterpaths ...string) *QueryWatches { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *QueryWatches) Human(human bool) *QueryWatches { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *QueryWatches) Pretty(pretty bool) *QueryWatches { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} + // From The offset from the first result to fetch. Needs to be non-negative. // API name: from func (r *QueryWatches) From(from int) *QueryWatches { diff --git a/typedapi/watcher/querywatches/request.go b/typedapi/watcher/querywatches/request.go index 7d16e728ca..f77d4fc098 100644 --- a/typedapi/watcher/querywatches/request.go +++ b/typedapi/watcher/querywatches/request.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package querywatches @@ -33,7 +33,7 @@ import ( // Request holds the request body struct for the package querywatches // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/query_watches/WatcherQueryWatchesRequest.ts#L25-L48 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/query_watches/WatcherQueryWatchesRequest.ts#L25-L48 type Request struct { // From The offset from the first result to fetch. Needs to be non-negative. 
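Aside (not part of the patch): QueryWatches gains the same query parameters, and its request body supports from/size paging as documented above. A paging sketch; the Size setter is assumed to mirror the From setter shown in this diff:

package watchexamples

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

// listWatches retrieves one page of stored watches.
func listWatches(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Watcher.QueryWatches().
		From(0).  // offset of the first result, must be non-negative
		Size(25). // assumed setter, mirroring the size field in the request body
		FilterPath("count", "watches._id").
		Do(ctx)
	if err != nil {
		return err
	}
	log.Printf("%d stored watches in total", res.Count)
	return nil
}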
@@ -51,6 +51,7 @@ type Request struct { // NewRequest returns a Request func NewRequest() *Request { r := &Request{} + return r } @@ -82,7 +83,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "from": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: @@ -108,7 +109,7 @@ func (s *Request) UnmarshalJSON(data []byte) error { case "size": - var tmp interface{} + var tmp any dec.Decode(&tmp) switch v := tmp.(type) { case string: diff --git a/typedapi/watcher/querywatches/response.go b/typedapi/watcher/querywatches/response.go index e889a3a2c2..db4ba7c57b 100644 --- a/typedapi/watcher/querywatches/response.go +++ b/typedapi/watcher/querywatches/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package querywatches @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package querywatches // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/query_watches/WatcherQueryWatchesResponse.ts#L23-L28 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/query_watches/WatcherQueryWatchesResponse.ts#L23-L28 type Response struct { Count int `json:"count"` Watches []types.QueryWatch `json:"watches"` diff --git a/typedapi/watcher/start/response.go b/typedapi/watcher/start/response.go index 9ae2e3f598..edf3782ea4 100644 --- a/typedapi/watcher/start/response.go +++ b/typedapi/watcher/start/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package start // Response holds the response body struct for the package start // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/start/WatcherStartResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/start/WatcherStartResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/watcher/start/start.go b/typedapi/watcher/start/start.go index 834ca9fcc1..987816dde2 100644 --- a/typedapi/watcher/start/start.go +++ b/typedapi/watcher/start/start.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Starts Watcher if it is not already running. 
package start @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r Start) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *Start) Header(key, value string) *Start { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Start) ErrorTrace(errortrace bool) *Start { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Start) FilterPath(filterpaths ...string) *Start { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Start) Human(human bool) *Start { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Start) Pretty(pretty bool) *Start { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/watcher/stats/response.go b/typedapi/watcher/stats/response.go index 3788487be8..7605b6214f 100644 --- a/typedapi/watcher/stats/response.go +++ b/typedapi/watcher/stats/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stats @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package stats // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/stats/WatcherStatsResponse.ts#L24-L32 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/stats/WatcherStatsResponse.ts#L24-L32 type Response struct { ClusterName string `json:"cluster_name"` ManuallyStopped bool `json:"manually_stopped"` diff --git a/typedapi/watcher/stats/stats.go b/typedapi/watcher/stats/stats.go index a8dbf88d55..65103fe600 100644 --- a/typedapi/watcher/stats/stats.go +++ b/typedapi/watcher/stats/stats.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Retrieves the current Watcher metrics. 
package stats @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -268,7 +267,7 @@ func (r Stats) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -312,3 +311,47 @@ func (r *Stats) EmitStacktraces(emitstacktraces bool) *Stats { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Stats) ErrorTrace(errortrace bool) *Stats { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Stats) FilterPath(filterpaths ...string) *Stats { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Stats) Human(human bool) *Stats { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Stats) Pretty(pretty bool) *Stats { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/watcher/stop/response.go b/typedapi/watcher/stop/response.go index d3b1d64256..97555ac26b 100644 --- a/typedapi/watcher/stop/response.go +++ b/typedapi/watcher/stop/response.go @@ -16,13 +16,13 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package stop // Response holds the response body struct for the package stop // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/watcher/stop/WatcherStopResponse.ts#L22-L24 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/watcher/stop/WatcherStopResponse.ts#L22-L24 type Response struct { // Acknowledged For a successful response, this value is always true. On failure, an diff --git a/typedapi/watcher/stop/stop.go b/typedapi/watcher/stop/stop.go index 89a21f2d87..0d9ac73b4b 100644 --- a/typedapi/watcher/stop/stop.go +++ b/typedapi/watcher/stop/stop.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Stops Watcher if it is running. 
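Aside (not part of the patch): the Stats endpoint above exposes EmitStacktraces alongside the new common parameters. A sketch that requests stack traces for currently executing watches; same client assumptions as before:

package watchexamples

import (
	"context"
	"log"

	"github.com/elastic/go-elasticsearch/v8"
)

// watcherStats logs high-level Watcher state, including stack traces for
// watches that are currently executing.
func watcherStats(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Watcher.Stats().
		EmitStacktraces(true).
		Human(true).
		Do(ctx)
	if err != nil {
		return err
	}
	log.Printf("cluster %q, manually stopped: %v", res.ClusterName, res.ManuallyStopped)
	return nil
}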
package stop @@ -27,9 +27,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -248,7 +248,7 @@ func (r Stop) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -275,3 +275,47 @@ func (r *Stop) Header(key, value string) *Stop { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Stop) ErrorTrace(errortrace bool) *Stop { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Stop) FilterPath(filterpaths ...string) *Stop { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. +// API name: human +func (r *Stop) Human(human bool) *Stop { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Stop) Pretty(pretty bool) *Stop { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/watcher/updatesettings/update_settings.go b/typedapi/watcher/updatesettings/update_settings.go index 93f9cac9b9..55e7ec394f 100644 --- a/typedapi/watcher/updatesettings/update_settings.go +++ b/typedapi/watcher/updatesettings/update_settings.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed // Update settings for the watcher system index package updatesettings @@ -26,7 +26,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strings" @@ -203,7 +202,7 @@ func (r UpdateSettings) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err diff --git a/typedapi/xpack/info/info.go b/typedapi/xpack/info/info.go index baa26e9392..09f5fd1049 100644 --- a/typedapi/xpack/info/info.go +++ b/typedapi/xpack/info/info.go @@ -16,9 +16,9 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Retrieves information about the installed X-Pack features. +// Provides general information about the installed X-Pack features. 
package info import ( @@ -27,7 +27,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" "strconv" @@ -69,7 +68,7 @@ func NewInfoFunc(tp elastictransport.Interface) NewInfo { } } -// Retrieves information about the installed X-Pack features. +// Provides general information about the installed X-Pack features. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/info-api.html func New(tp elastictransport.Interface) *Info { @@ -247,7 +246,7 @@ func (r Info) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -304,3 +303,34 @@ func (r *Info) Human(human bool) *Info { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Info) ErrorTrace(errortrace bool) *Info { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Info) FilterPath(filterpaths ...string) *Info { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Info) Pretty(pretty bool) *Info { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +} diff --git a/typedapi/xpack/info/response.go b/typedapi/xpack/info/response.go index 80d2c14e60..204bc7d8ec 100644 --- a/typedapi/xpack/info/response.go +++ b/typedapi/xpack/info/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package info @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package info // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/info/XPackInfoResponse.ts#L22-L29 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/info/XPackInfoResponse.ts#L22-L29 type Response struct { Build types.BuildInformation `json:"build"` Features types.XpackFeatures `json:"features"` diff --git a/typedapi/xpack/usage/response.go b/typedapi/xpack/usage/response.go index 67335e28d9..7d642d9a5b 100644 --- a/typedapi/xpack/usage/response.go +++ b/typedapi/xpack/usage/response.go @@ -16,7 +16,7 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. 
-// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed package usage @@ -26,7 +26,7 @@ import ( // Response holds the response body struct for the package usage // -// https://github.com/elastic/elasticsearch-specification/blob/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757/specification/xpack/usage/XPackUsageResponse.ts#L43-L79 +// https://github.com/elastic/elasticsearch-specification/blob/cdb84fa39f1401846dab6e1c76781fb3090527ed/specification/xpack/usage/XPackUsageResponse.ts#L43-L79 type Response struct { AggregateMetric types.Base `json:"aggregate_metric"` Analytics types.Analytics `json:"analytics"` diff --git a/typedapi/xpack/usage/usage.go b/typedapi/xpack/usage/usage.go index ed8471ac81..b502d5c0bc 100644 --- a/typedapi/xpack/usage/usage.go +++ b/typedapi/xpack/usage/usage.go @@ -16,9 +16,10 @@ // under the License. // Code generated from the elasticsearch-specification DO NOT EDIT. -// https://github.com/elastic/elasticsearch-specification/tree/5fb8f1ce9c4605abcaa44aa0f17dbfc60497a757 +// https://github.com/elastic/elasticsearch-specification/tree/cdb84fa39f1401846dab6e1c76781fb3090527ed -// Retrieves usage information about the installed X-Pack features. +// This API provides information about which features are currently enabled and +// available under the current license and some usage statistics. package usage import ( @@ -27,9 +28,9 @@ import ( "errors" "fmt" "io" - "io/ioutil" "net/http" "net/url" + "strconv" "strings" "github.com/elastic/elastic-transport-go/v8/elastictransport" @@ -68,7 +69,8 @@ func NewUsageFunc(tp elastictransport.Interface) NewUsage { } } -// Retrieves usage information about the installed X-Pack features. +// This API provides information about which features are currently enabled and +// available under the current license and some usage statistics. // // https://www.elastic.co/guide/en/elasticsearch/reference/current/usage-api.html func New(tp elastictransport.Interface) *Usage { @@ -248,7 +250,7 @@ func (r Usage) IsSuccess(providedCtx context.Context) (bool, error) { if err != nil { return false, err } - io.Copy(ioutil.Discard, res.Body) + io.Copy(io.Discard, res.Body) err = res.Body.Close() if err != nil { return false, err @@ -284,3 +286,47 @@ func (r *Usage) MasterTimeout(duration string) *Usage { return r } + +// ErrorTrace When set to `true` Elasticsearch will include the full stack trace of errors +// when they occur. +// API name: error_trace +func (r *Usage) ErrorTrace(errortrace bool) *Usage { + r.values.Set("error_trace", strconv.FormatBool(errortrace)) + + return r +} + +// FilterPath Comma-separated list of filters in dot notation which reduce the response +// returned by Elasticsearch. +// API name: filter_path +func (r *Usage) FilterPath(filterpaths ...string) *Usage { + tmp := []string{} + for _, item := range filterpaths { + tmp = append(tmp, fmt.Sprintf("%v", item)) + } + r.values.Set("filter_path", strings.Join(tmp, ",")) + + return r +} + +// Human When set to `true` will return statistics in a format suitable for humans. +// For example `"exists_time": "1h"` for humans and +// `"eixsts_time_in_millis": 3600000` for computers. When disabled the human +// readable values will be omitted. This makes sense for responses being +// consumed +// only by machines. 
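Aside (not part of the patch): the usage endpoint above now describes itself as reporting which features are enabled under the current license plus usage statistics, and it gains the common parameters in addition to MasterTimeout. A sketch; the Xpack namespace accessor on the typed client is an assumption:

package watchexamples

import (
	"context"

	"github.com/elastic/go-elasticsearch/v8"
)

// featureUsage fetches X-Pack usage, bounding how long to wait for the master
// node and trimming the response to the watcher section.
func featureUsage(ctx context.Context, es *elasticsearch.TypedClient) error {
	res, err := es.Xpack.Usage(). // namespace accessor assumed
		MasterTimeout("30s").
		FilterPath("watcher").
		Do(ctx)
	if err != nil {
		return err
	}
	_ = res // the Response struct carries one field per X-Pack feature
	return nil
}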
+// API name: human +func (r *Usage) Human(human bool) *Usage { + r.values.Set("human", strconv.FormatBool(human)) + + return r +} + +// Pretty If set to `true` the returned JSON will be "pretty-formatted". Only use +// this option for debugging only. +// API name: pretty +func (r *Usage) Pretty(pretty bool) *Usage { + r.values.Set("pretty", strconv.FormatBool(pretty)) + + return r +}
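Aside (not part of the patch): across all of the builders added in this diff the encoding is the same, namely boolean flags go through strconv.FormatBool and filter_path values are comma-joined before being stored in the request's url.Values. A standalone sketch of the resulting query string:

package main

import (
	"fmt"
	"net/url"
	"strconv"
	"strings"
)

func main() {
	// Mirrors what the generated helpers do with the request's query values.
	values := url.Values{}
	values.Set("error_trace", strconv.FormatBool(true))
	values.Set("human", strconv.FormatBool(false))
	values.Set("filter_path", strings.Join([]string{"found", "_id"}, ","))

	// Prints: error_trace=true&filter_path=found%2C_id&human=false
	fmt.Println(values.Encode())
}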